2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: bits 31..26 of the instruction word. */
#define MASK_OP_MAJOR(op)   ((op) & (0x3F << 26))

/*
 * Major opcode values (already shifted into bit position 26).
 * Several mnemonics deliberately share an encoding: R6 compact
 * branches reuse the pre-R6 branch-likely slots, and OPC_DAUI
 * reuses the OPC_JALX slot; the decoder disambiguates by ISA level
 * and by the rs/rt fields.
 */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26), /* NOTE(review): restored — dropped by the mangled extraction */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX, /* NOTE(review): restored — dropped by the mangled extraction */
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
472 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
890 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
906 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
907 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
908 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
909 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
910 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
911 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
912 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
913 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
914 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
915 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
916 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
917 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
918 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
919 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
923 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
926 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
927 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
928 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
929 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
930 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
931 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
932 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
933 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
936 /* Coprocessor 0 (with rs == C0) */
937 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
940 OPC_TLBR
= 0x01 | OPC_C0
,
941 OPC_TLBWI
= 0x02 | OPC_C0
,
942 OPC_TLBINV
= 0x03 | OPC_C0
,
943 OPC_TLBINVF
= 0x04 | OPC_C0
,
944 OPC_TLBWR
= 0x06 | OPC_C0
,
945 OPC_TLBP
= 0x08 | OPC_C0
,
946 OPC_RFE
= 0x10 | OPC_C0
,
947 OPC_ERET
= 0x18 | OPC_C0
,
948 OPC_DERET
= 0x1F | OPC_C0
,
949 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
/* Outer parentheses added to make the macro safe inside larger expressions. */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */
};
969 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
970 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
971 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
972 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
973 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
974 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
975 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
976 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
977 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
978 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
979 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
980 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
981 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
982 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
983 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
984 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
985 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
986 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
987 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
988 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
989 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
991 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
992 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
993 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
994 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
995 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
996 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
997 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
998 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1001 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1002 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1005 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1006 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1007 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1008 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1012 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1013 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1017 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1018 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1021 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1024 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1025 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1026 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1027 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1028 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1029 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1030 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1031 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1032 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1033 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1034 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1037 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1040 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1041 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1042 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1043 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1044 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1045 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1046 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1047 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1050 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1051 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1052 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1053 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1054 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1055 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1056 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1059 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1060 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1061 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1062 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1063 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1064 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1065 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1068 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1069 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1070 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1071 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1072 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1073 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1074 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1077 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1078 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1079 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1080 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1081 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1083 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1084 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1085 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1086 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1087 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1088 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1090 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1091 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1092 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1093 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1094 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1095 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1097 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1098 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1099 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1100 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1101 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1102 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1104 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1105 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1106 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1107 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1108 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1109 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1111 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1112 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1113 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1114 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1115 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1116 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1118 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1119 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1120 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1121 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1122 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1123 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1125 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1126 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1127 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1128 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1129 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1130 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1134 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1137 OPC_LWXC1
= 0x00 | OPC_CP3
,
1138 OPC_LDXC1
= 0x01 | OPC_CP3
,
1139 OPC_LUXC1
= 0x05 | OPC_CP3
,
1140 OPC_SWXC1
= 0x08 | OPC_CP3
,
1141 OPC_SDXC1
= 0x09 | OPC_CP3
,
1142 OPC_SUXC1
= 0x0D | OPC_CP3
,
1143 OPC_PREFX
= 0x0F | OPC_CP3
,
1144 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1145 OPC_MADD_S
= 0x20 | OPC_CP3
,
1146 OPC_MADD_D
= 0x21 | OPC_CP3
,
1147 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1148 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1149 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1150 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1151 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1152 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1153 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1154 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1155 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1156 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1160 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1162 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1163 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1164 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1165 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1166 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1167 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1168 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1169 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1170 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1171 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1172 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1173 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1174 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1175 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1176 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1177 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1178 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1179 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1180 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1181 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1182 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1184 /* MI10 instruction */
1185 OPC_LD_B
= (0x20) | OPC_MSA
,
1186 OPC_LD_H
= (0x21) | OPC_MSA
,
1187 OPC_LD_W
= (0x22) | OPC_MSA
,
1188 OPC_LD_D
= (0x23) | OPC_MSA
,
1189 OPC_ST_B
= (0x24) | OPC_MSA
,
1190 OPC_ST_H
= (0x25) | OPC_MSA
,
1191 OPC_ST_W
= (0x26) | OPC_MSA
,
1192 OPC_ST_D
= (0x27) | OPC_MSA
,
1196 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1197 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1198 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1199 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1200 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1201 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1202 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1203 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1204 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1205 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1206 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1207 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1208 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1210 /* I8 instruction */
1211 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1212 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1213 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1214 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1215 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1216 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1217 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1218 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1219 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1220 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1222 /* VEC/2R/2RF instruction */
1223 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1224 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1225 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1226 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1227 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1228 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1229 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1231 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1232 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1234 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1235 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1236 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1237 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1238 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1240 /* 2RF instruction df(bit 16) = _w, _d */
1241 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1242 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1243 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1244 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1245 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1246 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1247 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1248 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1249 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1250 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1251 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1252 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1253 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1254 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1255 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1256 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1258 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1259 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1260 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1261 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1262 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1263 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1264 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1265 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1266 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1267 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1268 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1269 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1270 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1272 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1274 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1275 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1278 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1279 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1281 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1282 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1283 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1285 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1290 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1292 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1293 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1294 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1295 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1296 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1297 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1298 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1299 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1300 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1301 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1310 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1312 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1313 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1314 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1315 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1316 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1317 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1318 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1319 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1320 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1321 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1323 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1324 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1325 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1326 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1327 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1328 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1329 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1330 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1331 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1332 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 /* 3RF instruction _df(bit 21) = _w, _d */
1335 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1336 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1337 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1338 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1339 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1340 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1341 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1342 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1343 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1346 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1357 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1358 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1362 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1367 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1370 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1373 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1377 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1378 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1379 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1380 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1381 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1382 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1383 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1384 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1385 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1386 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1387 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1388 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1389 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1394 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1395 * ============================================
1397 * MXU (full name: MIPS eXtension/enhanced Unit) is an SIMD extension of MIPS32
1398 * instructions set. It is designed to fit the needs of signal, graphical and
1399 * video processing applications. MXU instruction set is used in Xburst family
1400 * of microprocessors by Ingenic.
1402 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1403 * the control register.
1405 * The notation used in MXU assembler mnemonics:
1407 * XRa, XRb, XRc, XRd - MXU registers
1408 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1409 * s12 - a subfield of an instruction code
1410 * strd2 - a subfield of an instruction code
1411 * eptn2 - a subfield of an instruction code
1412 * eptn3 - a subfield of an instruction code
1413 * optn2 - a subfield of an instruction code
1414 * optn3 - a subfield of an instruction code
1415 * sft4 - a subfield of an instruction code
1417 * Load/Store instructions Multiplication instructions
1418 * ----------------------- ---------------------------
1420 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1421 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1422 * S32LDDV XRa, Rb, rc, strd2 S32SUB XRa, XRd, Rs, Rt
1423 * S32STDV XRa, Rb, rc, strd2 S32SUBU XRa, XRd, Rs, Rt
1424 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1425 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1426 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1427 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1428 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1429 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1430 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1431 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1432 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1433 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1434 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1435 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1436 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1437 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1438 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1439 * S16SDI XRa, Rb, s10, eptn2
1440 * S8LDD XRa, Rb, s8, eptn3
1441 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1442 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1443 * S8SDI XRa, Rb, s8, eptn3
1444 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1445 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1446 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1447 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1448 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1449 * S32CPS XRa, XRb, XRc
1450 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1451 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1452 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1453 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1454 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1455 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1456 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1457 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1458 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1459 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1460 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1461 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1462 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1463 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1464 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1465 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1466 * Q8SLT XRa, XRb, XRc
1467 * Q8SLTU XRa, XRb, XRc
1468 * Q8MOVZ XRa, XRb, XRc Shift instructions
1469 * Q8MOVN XRa, XRb, XRc ------------------
1471 * D32SLL XRa, XRb, XRc, XRd, sft4
1472 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1473 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1474 * D32SARL XRa, XRb, XRc, sft4
1475 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1476 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1477 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1478 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1479 * Q16SLL XRa, XRb, XRc, XRd, sft4
1480 * Q16SLR XRa, XRb, XRc, XRd, sft4
 *  Miscellaneous instructions           Q16SAR   XRa, XRb, XRc, XRd, sft4
1482 * ------------------------- Q16SLLV XRa, XRb, Rb
1483 * Q16SLRV XRa, XRb, Rb
1484 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1485 * S32ALN XRa, XRb, XRc, Rb
1486 * S32ALNI XRa, XRb, XRc, s3
1487 * S32LUI XRa, s8, optn3 Move instructions
1488 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1489 * S32EXTRV XRa, XRb, Rs, Rt
1490 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1491 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1497 * ┌─ 000000 ─ OPC_MXU_S32MADD
1498 * ├─ 000001 ─ OPC_MXU_S32MADDU
1499 * ├─ 000010 ─ <not assigned>
1501 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1502 * │ ├─ 001 ─ OPC_MXU_S32MIN
1503 * │ ├─ 010 ─ OPC_MXU_D16MAX
1504 * │ ├─ 011 ─ OPC_MXU_D16MIN
1505 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1506 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1507 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1508 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1509 * ├─ 000100 ─ OPC_MXU_S32MSUB
1510 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1511 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1512 * │ ├─ 001 ─ OPC_MXU_D16SLT
1513 * │ ├─ 010 ─ OPC_MXU_D16AVG
1514 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1515 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1516 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1517 * │ └─ 111 ─ OPC_MXU_Q8ADD
1520 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1521 * │ ├─ 010 ─ OPC_MXU_D16CPS
1522 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1523 * │ └─ 110 ─ OPC_MXU_Q16SAT
1524 * ├─ 001000 ─ OPC_MXU_D16MUL
1526 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1527 * │ └─ 01 ─ OPC_MXU_D16MULE
1528 * ├─ 001010 ─ OPC_MXU_D16MAC
1529 * ├─ 001011 ─ OPC_MXU_D16MACF
1530 * ├─ 001100 ─ OPC_MXU_D16MADL
1532 * ├─ 001101 ─ OPC_MXU__POOL04 ─┬─ 00 ─ OPC_MXU_S16MAD
1533 * │ └─ 01 ─ OPC_MXU_S16MAD_1
1534 * ├─ 001110 ─ OPC_MXU_Q16ADD
1535 * ├─ 001111 ─ OPC_MXU_D16MACE
1537 * ├─ 010000 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32LDD
1538 * │ └─ 1 ─ OPC_MXU_S32LDDR
1541 * ├─ 010001 ─ OPC_MXU__POOL06 ─┬─ 0 ─ OPC_MXU_S32STD
1542 * │ └─ 1 ─ OPC_MXU_S32STDR
1545 * ├─ 010010 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1546 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1549 * ├─ 010011 ─ OPC_MXU__POOL08 ─┬─ 0000 ─ OPC_MXU_S32STDV
1550 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1553 * ├─ 010100 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32LDI
1554 * │ └─ 1 ─ OPC_MXU_S32LDIR
1557 * ├─ 010101 ─ OPC_MXU__POOL10 ─┬─ 0 ─ OPC_MXU_S32SDI
1558 * │ └─ 1 ─ OPC_MXU_S32SDIR
1561 * ├─ 010110 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1562 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1565 * ├─ 010111 ─ OPC_MXU__POOL12 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1566 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1567 * ├─ 011000 ─ OPC_MXU_D32ADD
1569 * MXU ├─ 011001 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_D32ACC
1570 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1571 * │ └─ 10 ─ OPC_MXU_D32ASUM
1572 * ├─ 011010 ─ <not assigned>
1574 * ├─ 011011 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q16ACC
1575 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1576 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1579 * ├─ 011100 ─ OPC_MXU__POOL15 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1580 * │ ├─ 01 ─ OPC_MXU_D8SUM
1581 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1582 * ├─ 011110 ─ <not assigned>
1583 * ├─ 011111 ─ <not assigned>
1584 * ├─ 100000 ─ <not assigned>
1585 * ├─ 100001 ─ <not assigned>
1586 * ├─ 100010 ─ OPC_MXU_S8LDD
1587 * ├─ 100011 ─ OPC_MXU_S8STD
1588 * ├─ 100100 ─ OPC_MXU_S8LDI
1589 * ├─ 100101 ─ OPC_MXU_S8SDI
 *          ├─ 100110 ─ OPC_MXU__POOL16 ─┬─ 00 ─ OPC_MXU_S32MUL
 *          │                            ├─ 01 ─ OPC_MXU_S32MULU
 *          │                            ├─ 10 ─ OPC_MXU_S32EXTR
 *          │                            └─ 11 ─ OPC_MXU_S32EXTRV
1597 * ├─ 100111 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_D32SARW
1598 * │ ├─ 001 ─ OPC_MXU_S32ALN
1599 * ├─ 101000 ─ OPC_MXU_LXB ├─ 010 ─ OPC_MXU_S32ALNI
1600 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_S32NOR
1601 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_S32AND
1602 * ├─ 101011 ─ OPC_MXU_S16STD ├─ 101 ─ OPC_MXU_S32OR
1603 * ├─ 101100 ─ OPC_MXU_S16LDI ├─ 110 ─ OPC_MXU_S32XOR
1604 * ├─ 101101 ─ OPC_MXU_S16SDI └─ 111 ─ OPC_MXU_S32LUI
1605 * ├─ 101000 ─ <not assigned>
1606 * ├─ 101001 ─ <not assigned>
1607 * ├─ 101010 ─ <not assigned>
1608 * ├─ 101011 ─ <not assigned>
1609 * ├─ 101100 ─ <not assigned>
1610 * ├─ 101101 ─ <not assigned>
1611 * ├─ 101110 ─ OPC_MXU_S32M2I
1612 * ├─ 101111 ─ OPC_MXU_S32I2M
1613 * ├─ 110000 ─ OPC_MXU_D32SLL
1614 * ├─ 110001 ─ OPC_MXU_D32SLR
1615 * ├─ 110010 ─ OPC_MXU_D32SARL
1616 * ├─ 110011 ─ OPC_MXU_D32SAR
1617 * ├─ 110100 ─ OPC_MXU_Q16SLL
1618 * ├─ 110101 ─ OPC_MXU_Q16SLR 20..18
1619 * ├─ 110110 ─ OPC_MXU__POOL18 ─┬─ 000 ─ OPC_MXU_D32SLLV
1620 * │ ├─ 001 ─ OPC_MXU_D32SLRV
1621 * │ ├─ 010 ─ OPC_MXU_D32SARV
1622 * │ ├─ 011 ─ OPC_MXU_Q16SLLV
1623 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1624 * │ └─ 101 ─ OPC_MXU_Q16SARV
1625 * ├─ 110111 ─ OPC_MXU_Q16SAR
1627 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1628 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1631 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1632 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1633 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1634 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1635 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
 *          │                            └─ 101 ─ OPC_MXU_S32MOVN
1639 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1640 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1641 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1642 * ├─ 111100 ─ OPC_MXU_Q8MADL
1643 * ├─ 111101 ─ OPC_MXU_S32SFL
1644 * ├─ 111110 ─ OPC_MXU_Q8SAD
1645 * └─ 111111 ─ <not assigned>
1650 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1651 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
1655 OPC_MXU_S32MADD
= 0x00,
1656 OPC_MXU_S32MADDU
= 0x01,
1657 /* not assigned 0x02 */
1658 OPC_MXU__POOL00
= 0x03,
1659 OPC_MXU_S32MSUB
= 0x04,
1660 OPC_MXU_S32MSUBU
= 0x05,
1661 OPC_MXU__POOL01
= 0x06,
1662 OPC_MXU__POOL02
= 0x07,
1663 OPC_MXU_D16MUL
= 0x08,
1664 OPC_MXU__POOL03
= 0x09,
1665 OPC_MXU_D16MAC
= 0x0A,
1666 OPC_MXU_D16MACF
= 0x0B,
1667 OPC_MXU_D16MADL
= 0x0C,
1668 OPC_MXU__POOL04
= 0x0D,
1669 OPC_MXU_Q16ADD
= 0x0E,
1670 OPC_MXU_D16MACE
= 0x0F,
1671 OPC_MXU__POOL05
= 0x10,
1672 OPC_MXU__POOL06
= 0x11,
1673 OPC_MXU__POOL07
= 0x12,
1674 OPC_MXU__POOL08
= 0x13,
1675 OPC_MXU__POOL09
= 0x14,
1676 OPC_MXU__POOL10
= 0x15,
1677 OPC_MXU__POOL11
= 0x16,
1678 OPC_MXU__POOL12
= 0x17,
1679 OPC_MXU_D32ADD
= 0x18,
1680 OPC_MXU__POOL13
= 0x19,
1681 /* not assigned 0x1A */
1682 OPC_MXU__POOL14
= 0x1B,
1683 OPC_MXU__POOL15
= 0x1C,
1684 OPC_MXU_Q8ACCE
= 0x1D,
1685 /* not assigned 0x1E */
1686 /* not assigned 0x1F */
1687 /* not assigned 0x20 */
1688 /* not assigned 0x21 */
1689 OPC_MXU_S8LDD
= 0x22,
1690 OPC_MXU_S8STD
= 0x23,
1691 OPC_MXU_S8LDI
= 0x24,
1692 OPC_MXU_S8SDI
= 0x25,
1693 OPC_MXU__POOL16
= 0x26,
1694 OPC_MXU__POOL17
= 0x27,
1696 /* not assigned 0x29 */
1697 OPC_MXU_S16LDD
= 0x2A,
1698 OPC_MXU_S16STD
= 0x2B,
1699 OPC_MXU_S16LDI
= 0x2C,
1700 OPC_MXU_S16SDI
= 0x2D,
1701 OPC_MXU_S32M2I
= 0x2E,
1702 OPC_MXU_S32I2M
= 0x2F,
1703 OPC_MXU_D32SLL
= 0x30,
1704 OPC_MXU_D32SLR
= 0x31,
1705 OPC_MXU_D32SARL
= 0x32,
1706 OPC_MXU_D32SAR
= 0x33,
1707 OPC_MXU_Q16SLL
= 0x34,
1708 OPC_MXU_Q16SLR
= 0x35,
1709 OPC_MXU__POOL18
= 0x36,
1710 OPC_MXU_Q16SAR
= 0x37,
1711 OPC_MXU__POOL19
= 0x38,
1712 OPC_MXU__POOL20
= 0x39,
1713 OPC_MXU__POOL21
= 0x3A,
1714 OPC_MXU_Q16SCOP
= 0x3B,
1715 OPC_MXU_Q8MADL
= 0x3C,
1716 OPC_MXU_S32SFL
= 0x3D,
1717 OPC_MXU_Q8SAD
= 0x3E,
1718 /* not assigned 0x3F */
1726 OPC_MXU_S32MAX
= 0x00,
1727 OPC_MXU_S32MIN
= 0x01,
1728 OPC_MXU_D16MAX
= 0x02,
1729 OPC_MXU_D16MIN
= 0x03,
1730 OPC_MXU_Q8MAX
= 0x04,
1731 OPC_MXU_Q8MIN
= 0x05,
1732 OPC_MXU_Q8SLT
= 0x06,
1733 OPC_MXU_Q8SLTU
= 0x07,
1740 OPC_MXU_S32SLT
= 0x00,
1741 OPC_MXU_D16SLT
= 0x01,
1742 OPC_MXU_D16AVG
= 0x02,
1743 OPC_MXU_D16AVGR
= 0x03,
1744 OPC_MXU_Q8AVG
= 0x04,
1745 OPC_MXU_Q8AVGR
= 0x05,
1746 OPC_MXU_Q8ADD
= 0x07,
1753 OPC_MXU_S32CPS
= 0x00,
1754 OPC_MXU_D16CPS
= 0x02,
1755 OPC_MXU_Q8ABD
= 0x04,
1756 OPC_MXU_Q16SAT
= 0x06,
1763 OPC_MXU_D16MULF
= 0x00,
1764 OPC_MXU_D16MULE
= 0x01,
1771 OPC_MXU_S16MAD
= 0x00,
1772 OPC_MXU_S16MAD_1
= 0x01,
1779 OPC_MXU_S32LDD
= 0x00,
1780 OPC_MXU_S32LDDR
= 0x01,
1787 OPC_MXU_S32STD
= 0x00,
1788 OPC_MXU_S32STDR
= 0x01,
1795 OPC_MXU_S32LDDV
= 0x00,
1796 OPC_MXU_S32LDDVR
= 0x01,
1803 OPC_MXU_S32STDV
= 0x00,
1804 OPC_MXU_S32STDVR
= 0x01,
1811 OPC_MXU_S32LDI
= 0x00,
1812 OPC_MXU_S32LDIR
= 0x01,
1819 OPC_MXU_S32SDI
= 0x00,
1820 OPC_MXU_S32SDIR
= 0x01,
1827 OPC_MXU_S32LDIV
= 0x00,
1828 OPC_MXU_S32LDIVR
= 0x01,
1835 OPC_MXU_S32SDIV
= 0x00,
1836 OPC_MXU_S32SDIVR
= 0x01,
1843 OPC_MXU_D32ACC
= 0x00,
1844 OPC_MXU_D32ACCM
= 0x01,
1845 OPC_MXU_D32ASUM
= 0x02,
1852 OPC_MXU_Q16ACC
= 0x00,
1853 OPC_MXU_Q16ACCM
= 0x01,
1854 OPC_MXU_Q16ASUM
= 0x02,
1861 OPC_MXU_Q8ADDE
= 0x00,
1862 OPC_MXU_D8SUM
= 0x01,
1863 OPC_MXU_D8SUMC
= 0x02,
1870 OPC_MXU_S32MUL
= 0x00,
1871 OPC_MXU_S32MULU
= 0x01,
1872 OPC_MXU_S32EXTR
= 0x02,
1873 OPC_MXU_S32EXTRV
= 0x03,
1880 OPC_MXU_D32SARW
= 0x00,
1881 OPC_MXU_S32ALN
= 0x01,
1882 OPC_MXU_S32ALNI
= 0x02,
1883 OPC_MXU_S32NOR
= 0x03,
1884 OPC_MXU_S32AND
= 0x04,
1885 OPC_MXU_S32OR
= 0x05,
1886 OPC_MXU_S32XOR
= 0x06,
1887 OPC_MXU_S32LUI
= 0x07,
1894 OPC_MXU_D32SLLV
= 0x00,
1895 OPC_MXU_D32SLRV
= 0x01,
1896 OPC_MXU_D32SARV
= 0x03,
1897 OPC_MXU_Q16SLLV
= 0x04,
1898 OPC_MXU_Q16SLRV
= 0x05,
1899 OPC_MXU_Q16SARV
= 0x07,
1906 OPC_MXU_Q8MUL
= 0x00,
1907 OPC_MXU_Q8MULSU
= 0x01,
1914 OPC_MXU_Q8MOVZ
= 0x00,
1915 OPC_MXU_Q8MOVN
= 0x01,
1916 OPC_MXU_D16MOVZ
= 0x02,
1917 OPC_MXU_D16MOVN
= 0x03,
1918 OPC_MXU_S32MOVZ
= 0x04,
1919 OPC_MXU_S32MOVN
= 0x05,
1926 OPC_MXU_Q8MAC
= 0x00,
1927 OPC_MXU_Q8MACSU
= 0x01,
1931 * Overview of the TX79-specific instruction set
1932 * =============================================
1934 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1935 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1936 * instructions and certain multimedia instructions (MMIs). These MMIs
1937 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1938 * or sixteen 8-bit paths.
1942 * The Toshiba TX System RISC TX79 Core Architecture manual,
1943 * https://wiki.qemu.org/File:C790.pdf
1945 * Three-Operand Multiply and Multiply-Add (4 instructions)
1946 * --------------------------------------------------------
1947 * MADD [rd,] rs, rt Multiply/Add
1948 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1949 * MULT [rd,] rs, rt Multiply (3-operand)
1950 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1952 * Multiply Instructions for Pipeline 1 (10 instructions)
1953 * ------------------------------------------------------
1954 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1955 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1956 * DIV1 rs, rt Divide Pipeline 1
1957 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1958 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1959 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1960 * MFHI1 rd Move From HI1 Register
1961 * MFLO1 rd Move From LO1 Register
1962 * MTHI1 rs Move To HI1 Register
1963 * MTLO1 rs Move To LO1 Register
1965 * Arithmetic (19 instructions)
1966 * ----------------------------
1967 * PADDB rd, rs, rt Parallel Add Byte
1968 * PSUBB rd, rs, rt Parallel Subtract Byte
1969 * PADDH rd, rs, rt Parallel Add Halfword
1970 * PSUBH rd, rs, rt Parallel Subtract Halfword
1971 * PADDW rd, rs, rt Parallel Add Word
1972 * PSUBW rd, rs, rt Parallel Subtract Word
1973 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1974 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1975 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1976 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1977 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1978 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1979 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1980 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1981 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1982 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1983 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1984 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1985 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1987 * Min/Max (4 instructions)
1988 * ------------------------
1989 * PMAXH rd, rs, rt Parallel Maximum Halfword
1990 * PMINH rd, rs, rt Parallel Minimum Halfword
1991 * PMAXW rd, rs, rt Parallel Maximum Word
1992 * PMINW rd, rs, rt Parallel Minimum Word
1994 * Absolute (2 instructions)
1995 * -------------------------
1996 * PABSH rd, rt Parallel Absolute Halfword
1997 * PABSW rd, rt Parallel Absolute Word
1999 * Logical (4 instructions)
2000 * ------------------------
2001 * PAND rd, rs, rt Parallel AND
2002 * POR rd, rs, rt Parallel OR
2003 * PXOR rd, rs, rt Parallel XOR
2004 * PNOR rd, rs, rt Parallel NOR
2006 * Shift (9 instructions)
2007 * ----------------------
2008 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2009 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2010 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2011 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2012 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2013 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2014 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2015 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2016 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2018 * Compare (6 instructions)
2019 * ------------------------
2020 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2021 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2022 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2023 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2024 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2025 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2027 * LZC (1 instruction)
2028 * -------------------
2029 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2031 * Quadword Load and Store (2 instructions)
2032 * ----------------------------------------
2033 * LQ rt, offset(base) Load Quadword
2034 * SQ rt, offset(base) Store Quadword
2036 * Multiply and Divide (19 instructions)
2037 * -------------------------------------
2038 * PMULTW rd, rs, rt Parallel Multiply Word
2039 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2040 * PDIVW rs, rt Parallel Divide Word
2041 * PDIVUW rs, rt Parallel Divide Unsigned Word
2042 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2043 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2044 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2045 * PMULTH rd, rs, rt Parallel Multiply Halfword
2046 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2047 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2048 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2049 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2050 * PDIVBW rs, rt Parallel Divide Broadcast Word
2051 * PMFHI rd Parallel Move From HI Register
2052 * PMFLO rd Parallel Move From LO Register
2053 * PMTHI rs Parallel Move To HI Register
2054 * PMTLO rs Parallel Move To LO Register
2055 * PMFHL rd Parallel Move From HI/LO Register
2056 * PMTHL rs Parallel Move To HI/LO Register
2058 * Pack/Extend (11 instructions)
2059 * -----------------------------
2060 * PPAC5 rd, rt Parallel Pack to 5 bits
2061 * PPACB rd, rs, rt Parallel Pack to Byte
2062 * PPACH rd, rs, rt Parallel Pack to Halfword
2063 * PPACW rd, rs, rt Parallel Pack to Word
2064 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2065 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2066 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2067 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2068 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2069 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2070 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2072 * Others (16 instructions)
2073 * ------------------------
2074 * PCPYH rd, rt Parallel Copy Halfword
2075 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2076 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2077 * PREVH rd, rt Parallel Reverse Halfword
2078 * PINTH rd, rs, rt Parallel Interleave Halfword
2079 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2080 * PEXEH rd, rt Parallel Exchange Even Halfword
2081 * PEXCH rd, rt Parallel Exchange Center Halfword
2082 * PEXEW rd, rt Parallel Exchange Even Word
2083 * PEXCW rd, rt Parallel Exchange Center Word
2084 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2085 * MFSA rd Move from Shift Amount Register
2086 * MTSA rs Move to Shift Amount Register
2087 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2088 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2089 * PROT3W rd, rt Parallel Rotate 3 Words
2091 * The TX79-specific Multimedia Instruction encodings
2092 * ==================================================
2094 * TX79 Multimedia Instruction encoding table keys:
2096 * * This code is reserved for future use. An attempt to execute it
2097 * causes a Reserved Instruction exception.
2098 * % This code indicates an instruction class. The instruction word
2099 * must be further decoded by examining additional tables that show
2100 * the values for other instruction fields.
2101 * # This code is reserved for the unsupported instructions DMULT,
2102 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2103 * to execute it causes a Reserved Instruction exception.
2105 * TX79 Multimedia Instructions encoded by opcode field (MMI, LQ, SQ):
2108 * +--------+----------------------------------------+
2110 * +--------+----------------------------------------+
2112 * opcode bits 28..26
2113 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2114 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2115 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2116 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2117 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2118 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2119 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2120 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2121 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2122 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2123 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/*
 * TX79 (R5900/C790) major opcodes that alias standard MIPS encodings.
 * NOTE(review): the `enum {` / `};` wrapper was lost in the garbled text and
 * has been restored; the member values below are exactly as in the source.
 */
enum {
    TX79_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    TX79_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    TX79_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2132 /* global register indices */
2133 static TCGv cpu_gpr
[32], cpu_PC
;
2134 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2135 static TCGv cpu_dspctrl
, btarget
, bcond
;
2136 static TCGv_i32 hflags
;
2137 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2138 static TCGv_i64 fpu_f64
[32];
2139 static TCGv_i64 msa_wr_d
[64];
2141 #include "exec/gen-icount.h"
2143 #define gen_helper_0e0i(name, arg) do { \
2144 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
2145 gen_helper_##name(cpu_env, helper_tmp); \
2146 tcg_temp_free_i32(helper_tmp); \
2149 #define gen_helper_0e1i(name, arg1, arg2) do { \
2150 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2151 gen_helper_##name(cpu_env, arg1, helper_tmp); \
2152 tcg_temp_free_i32(helper_tmp); \
2155 #define gen_helper_1e0i(name, ret, arg1) do { \
2156 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
2157 gen_helper_##name(ret, cpu_env, helper_tmp); \
2158 tcg_temp_free_i32(helper_tmp); \
2161 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
2162 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2163 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
2164 tcg_temp_free_i32(helper_tmp); \
2167 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
2168 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2169 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
2170 tcg_temp_free_i32(helper_tmp); \
2173 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
2174 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2175 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
2176 tcg_temp_free_i32(helper_tmp); \
2179 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
2180 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
2181 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
2182 tcg_temp_free_i32(helper_tmp); \
2185 typedef struct DisasContext
{
2186 DisasContextBase base
;
2187 target_ulong saved_pc
;
2188 target_ulong page_start
;
2190 uint64_t insn_flags
;
2191 int32_t CP0_Config1
;
2192 int32_t CP0_Config2
;
2193 int32_t CP0_Config3
;
2194 int32_t CP0_Config5
;
2195 /* Routine used to access memory */
2197 TCGMemOp default_tcg_memop_mask
;
2198 uint32_t hflags
, saved_hflags
;
2199 target_ulong btarget
;
2210 int CP0_LLAddr_shift
;
2219 #define DISAS_STOP DISAS_TARGET_0
2220 #define DISAS_EXIT DISAS_TARGET_1
2222 static const char * const regnames
[] = {
2223 "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
2224 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
2225 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
2226 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
2229 static const char * const regnames_HI
[] = {
2230 "HI0", "HI1", "HI2", "HI3",
2233 static const char * const regnames_LO
[] = {
2234 "LO0", "LO1", "LO2", "LO3",
2237 static const char * const fregnames
[] = {
2238 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
2239 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
2240 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
2241 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
2244 static const char * const msaregnames
[] = {
2245 "w0.d0", "w0.d1", "w1.d0", "w1.d1",
2246 "w2.d0", "w2.d1", "w3.d0", "w3.d1",
2247 "w4.d0", "w4.d1", "w5.d0", "w5.d1",
2248 "w6.d0", "w6.d1", "w7.d0", "w7.d1",
2249 "w8.d0", "w8.d1", "w9.d0", "w9.d1",
2250 "w10.d0", "w10.d1", "w11.d0", "w11.d1",
2251 "w12.d0", "w12.d1", "w13.d0", "w13.d1",
2252 "w14.d0", "w14.d1", "w15.d0", "w15.d1",
2253 "w16.d0", "w16.d1", "w17.d0", "w17.d1",
2254 "w18.d0", "w18.d1", "w19.d0", "w19.d1",
2255 "w20.d0", "w20.d1", "w21.d0", "w21.d1",
2256 "w22.d0", "w22.d1", "w23.d0", "w23.d1",
2257 "w24.d0", "w24.d1", "w25.d0", "w25.d1",
2258 "w26.d0", "w26.d1", "w27.d0", "w27.d1",
2259 "w28.d0", "w28.d1", "w29.d0", "w29.d1",
2260 "w30.d0", "w30.d1", "w31.d0", "w31.d1",
2263 #define LOG_DISAS(...) \
2265 if (MIPS_DEBUG_DISAS) { \
2266 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
2270 #define MIPS_INVAL(op) \
2272 if (MIPS_DEBUG_DISAS) { \
2273 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
2274 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
2275 ctx->base.pc_next, ctx->opcode, op, \
2276 ctx->opcode >> 26, ctx->opcode & 0x3F, \
2277 ((ctx->opcode >> 16) & 0x1F)); \
2281 /* General purpose registers moves. */
2282 static inline void gen_load_gpr (TCGv t
, int reg
)
2285 tcg_gen_movi_tl(t
, 0);
2287 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2290 static inline void gen_store_gpr (TCGv t
, int reg
)
2293 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2296 /* Moves to/from shadow registers. */
2297 static inline void gen_load_srsgpr (int from
, int to
)
2299 TCGv t0
= tcg_temp_new();
2302 tcg_gen_movi_tl(t0
, 0);
2304 TCGv_i32 t2
= tcg_temp_new_i32();
2305 TCGv_ptr addr
= tcg_temp_new_ptr();
2307 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2308 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2309 tcg_gen_andi_i32(t2
, t2
, 0xf);
2310 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2311 tcg_gen_ext_i32_ptr(addr
, t2
);
2312 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2314 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2315 tcg_temp_free_ptr(addr
);
2316 tcg_temp_free_i32(t2
);
2318 gen_store_gpr(t0
, to
);
2322 static inline void gen_store_srsgpr (int from
, int to
)
2325 TCGv t0
= tcg_temp_new();
2326 TCGv_i32 t2
= tcg_temp_new_i32();
2327 TCGv_ptr addr
= tcg_temp_new_ptr();
2329 gen_load_gpr(t0
, from
);
2330 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2331 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2332 tcg_gen_andi_i32(t2
, t2
, 0xf);
2333 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2334 tcg_gen_ext_i32_ptr(addr
, t2
);
2335 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2337 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2338 tcg_temp_free_ptr(addr
);
2339 tcg_temp_free_i32(t2
);
2345 static inline void gen_save_pc(target_ulong pc
)
2347 tcg_gen_movi_tl(cpu_PC
, pc
);
2350 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2352 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2353 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2354 gen_save_pc(ctx
->base
.pc_next
);
2355 ctx
->saved_pc
= ctx
->base
.pc_next
;
2357 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2358 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2359 ctx
->saved_hflags
= ctx
->hflags
;
2360 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2366 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2372 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2374 ctx
->saved_hflags
= ctx
->hflags
;
2375 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2381 ctx
->btarget
= env
->btarget
;
2386 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2388 TCGv_i32 texcp
= tcg_const_i32(excp
);
2389 TCGv_i32 terr
= tcg_const_i32(err
);
2390 save_cpu_state(ctx
, 1);
2391 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2392 tcg_temp_free_i32(terr
);
2393 tcg_temp_free_i32(texcp
);
2394 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2397 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2399 gen_helper_0e0i(raise_exception
, excp
);
2402 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2404 generate_exception_err(ctx
, excp
, 0);
2407 /* Floating point register moves. */
2408 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2410 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2411 generate_exception(ctx
, EXCP_RI
);
2413 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2416 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2419 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2420 generate_exception(ctx
, EXCP_RI
);
2422 t64
= tcg_temp_new_i64();
2423 tcg_gen_extu_i32_i64(t64
, t
);
2424 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2425 tcg_temp_free_i64(t64
);
2428 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2430 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2431 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2433 gen_load_fpr32(ctx
, t
, reg
| 1);
2437 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2439 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2440 TCGv_i64 t64
= tcg_temp_new_i64();
2441 tcg_gen_extu_i32_i64(t64
, t
);
2442 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2443 tcg_temp_free_i64(t64
);
2445 gen_store_fpr32(ctx
, t
, reg
| 1);
2449 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2451 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2452 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2454 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2458 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2460 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2461 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2464 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2465 t0
= tcg_temp_new_i64();
2466 tcg_gen_shri_i64(t0
, t
, 32);
2467 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2468 tcg_temp_free_i64(t0
);
2472 static inline int get_fp_bit (int cc
)
2480 /* Addresses computation */
2481 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2483 tcg_gen_add_tl(ret
, arg0
, arg1
);
2485 #if defined(TARGET_MIPS64)
2486 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2487 tcg_gen_ext32s_i64(ret
, ret
);
2492 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2495 tcg_gen_addi_tl(ret
, base
, ofs
);
2497 #if defined(TARGET_MIPS64)
2498 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2499 tcg_gen_ext32s_i64(ret
, ret
);
2504 /* Addresses computation (translation time) */
2505 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2508 target_long sum
= base
+ offset
;
2510 #if defined(TARGET_MIPS64)
2511 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2518 /* Sign-extract the low 32-bits to a target_long. */
2519 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2521 #if defined(TARGET_MIPS64)
2522 tcg_gen_ext32s_i64(ret
, arg
);
2524 tcg_gen_extrl_i64_i32(ret
, arg
);
2528 /* Sign-extract the high 32-bits to a target_long. */
2529 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2531 #if defined(TARGET_MIPS64)
2532 tcg_gen_sari_i64(ret
, arg
, 32);
2534 tcg_gen_extrh_i64_i32(ret
, arg
);
2538 static inline void check_cp0_enabled(DisasContext
*ctx
)
2540 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2541 generate_exception_err(ctx
, EXCP_CpU
, 0);
2544 static inline void check_cp1_enabled(DisasContext
*ctx
)
2546 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2547 generate_exception_err(ctx
, EXCP_CpU
, 1);
2550 /* Verify that the processor is running with COP1X instructions enabled.
2551 This is associated with the nabla symbol in the MIPS32 and MIPS64
2554 static inline void check_cop1x(DisasContext
*ctx
)
2556 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2557 generate_exception_end(ctx
, EXCP_RI
);
2560 /* Verify that the processor is running with 64-bit floating-point
2561 operations enabled. */
2563 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2565 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2566 generate_exception_end(ctx
, EXCP_RI
);
2570 * Verify if floating point register is valid; an operation is not defined
2571 * if bit 0 of any register specification is set and the FR bit in the
2572 * Status register equals zero, since the register numbers specify an
2573 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2574 * in the Status register equals one, both even and odd register numbers
2575 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2577 * Multiple 64 bit wide registers can be checked by calling
2578 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2580 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2582 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2583 generate_exception_end(ctx
, EXCP_RI
);
2586 /* Verify that the processor is running with DSP instructions enabled.
2587 This is enabled by CP0 Status register MX(24) bit.
2590 static inline void check_dsp(DisasContext
*ctx
)
2592 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2593 if (ctx
->insn_flags
& ASE_DSP
) {
2594 generate_exception_end(ctx
, EXCP_DSPDIS
);
2596 generate_exception_end(ctx
, EXCP_RI
);
2601 static inline void check_dsp_r2(DisasContext
*ctx
)
2603 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2604 if (ctx
->insn_flags
& ASE_DSP
) {
2605 generate_exception_end(ctx
, EXCP_DSPDIS
);
2607 generate_exception_end(ctx
, EXCP_RI
);
2612 static inline void check_dsp_r3(DisasContext
*ctx
)
2614 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2615 if (ctx
->insn_flags
& ASE_DSP
) {
2616 generate_exception_end(ctx
, EXCP_DSPDIS
);
2618 generate_exception_end(ctx
, EXCP_RI
);
2623 /* This code generates a "reserved instruction" exception if the
2624 CPU does not support the instruction set corresponding to flags. */
2625 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2627 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2628 generate_exception_end(ctx
, EXCP_RI
);
2632 /* This code generates a "reserved instruction" exception if the
2633 CPU has corresponding flag set which indicates that the instruction
2634 has been removed. */
2635 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2637 if (unlikely(ctx
->insn_flags
& flags
)) {
2638 generate_exception_end(ctx
, EXCP_RI
);
2642 /* This code generates a "reserved instruction" exception if the
2643 CPU does not support 64-bit paired-single (PS) floating point data type */
2644 static inline void check_ps(DisasContext
*ctx
)
2646 if (unlikely(!ctx
->ps
)) {
2647 generate_exception(ctx
, EXCP_RI
);
2649 check_cp1_64bitmode(ctx
);
2652 #ifdef TARGET_MIPS64
2653 /* This code generates a "reserved instruction" exception if 64-bit
2654 instructions are not enabled. */
2655 static inline void check_mips_64(DisasContext
*ctx
)
2657 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
2658 generate_exception_end(ctx
, EXCP_RI
);
2662 #ifndef CONFIG_USER_ONLY
2663 static inline void check_mvh(DisasContext
*ctx
)
2665 if (unlikely(!ctx
->mvh
)) {
2666 generate_exception(ctx
, EXCP_RI
);
2672 * This code generates a "reserved instruction" exception if the
2673 * Config5 XNP bit is set.
2675 static inline void check_xnp(DisasContext
*ctx
)
2677 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
2678 generate_exception_end(ctx
, EXCP_RI
);
2682 #ifndef CONFIG_USER_ONLY
2684 * This code generates a "reserved instruction" exception if the
2685 * Config3 PW bit is NOT set.
2687 static inline void check_pw(DisasContext
*ctx
)
2689 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
2690 generate_exception_end(ctx
, EXCP_RI
);
2696 * This code generates a "reserved instruction" exception if the
2697 * Config3 MT bit is NOT set.
2699 static inline void check_mt(DisasContext
*ctx
)
2701 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2702 generate_exception_end(ctx
, EXCP_RI
);
2706 #ifndef CONFIG_USER_ONLY
2708 * This code generates a "coprocessor unusable" exception if CP0 is not
2709 * available, and, if that is not the case, generates a "reserved instruction"
2710 * exception if the Config5 MT bit is NOT set. This is needed for availability
2711 * control of some of MT ASE instructions.
2713 static inline void check_cp0_mt(DisasContext
*ctx
)
2715 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2716 generate_exception_err(ctx
, EXCP_CpU
, 0);
2718 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
2719 generate_exception_err(ctx
, EXCP_RI
, 0);
2726 * This code generates a "reserved instruction" exception if the
2727 * Config5 NMS bit is set.
2729 static inline void check_nms(DisasContext
*ctx
)
2731 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
2732 generate_exception_end(ctx
, EXCP_RI
);
2737 /* Define small wrappers for gen_load_fpr* so that we have a uniform
2738 calling interface for 32 and 64-bit FPRs. No sense in changing
2739 all callers for gen_load_fpr32 when we need the CTX parameter for
2741 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
2742 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
2743 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
2744 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
2745 int ft, int fs, int cc) \
2747 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
2748 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
2757 check_cp1_registers(ctx, fs | ft); \
2765 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
2766 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
2768 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
2769 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
2770 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
2771 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
2772 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
2773 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
2774 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
2775 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
2776 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
2777 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
2778 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
2779 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
2780 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
2781 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
2782 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
2783 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
2786 tcg_temp_free_i##bits (fp0); \
2787 tcg_temp_free_i##bits (fp1); \
2790 FOP_CONDS(, 0, d
, FMT_D
, 64)
2791 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
2792 FOP_CONDS(, 0, s
, FMT_S
, 32)
2793 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
2794 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
2795 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
2798 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
2799 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
2800 int ft, int fs, int fd) \
2802 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
2803 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
2804 if (ifmt == FMT_D) { \
2805 check_cp1_registers(ctx, fs | ft | fd); \
2807 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
2808 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
2811 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
2814 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
2817 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
2820 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
2823 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
2826 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
2829 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
2832 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
2835 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
2838 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2841 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2844 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2847 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2850 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2853 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2856 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2859 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2862 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2865 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2868 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2871 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2874 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2880 tcg_temp_free_i ## bits (fp0); \
2881 tcg_temp_free_i ## bits (fp1); \
2884 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2885 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2887 #undef gen_ldcmp_fpr32
2888 #undef gen_ldcmp_fpr64
2890 /* load/store instructions. */
2891 #ifdef CONFIG_USER_ONLY
2892 #define OP_LD_ATOMIC(insn,fname) \
2893 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2894 DisasContext *ctx) \
2896 TCGv t0 = tcg_temp_new(); \
2897 tcg_gen_mov_tl(t0, arg1); \
2898 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2899 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2900 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2901 tcg_temp_free(t0); \
2904 #define OP_LD_ATOMIC(insn,fname) \
2905 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2906 DisasContext *ctx) \
2908 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2911 OP_LD_ATOMIC(ll
,ld32s
);
2912 #if defined(TARGET_MIPS64)
2913 OP_LD_ATOMIC(lld
,ld64
);
2917 #ifdef CONFIG_USER_ONLY
2918 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2919 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2920 DisasContext *ctx) \
2922 TCGv t0 = tcg_temp_new(); \
2923 TCGLabel *l1 = gen_new_label(); \
2924 TCGLabel *l2 = gen_new_label(); \
2926 tcg_gen_andi_tl(t0, arg2, almask); \
2927 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2928 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2929 generate_exception(ctx, EXCP_AdES); \
2930 gen_set_label(l1); \
2931 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2932 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2933 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2934 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2935 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2936 generate_exception_end(ctx, EXCP_SC); \
2937 gen_set_label(l2); \
2938 tcg_gen_movi_tl(t0, 0); \
2939 gen_store_gpr(t0, rt); \
2940 tcg_temp_free(t0); \
2943 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2944 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2945 DisasContext *ctx) \
2947 TCGv t0 = tcg_temp_new(); \
2948 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2949 gen_store_gpr(t0, rt); \
2950 tcg_temp_free(t0); \
2953 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2954 #if defined(TARGET_MIPS64)
2955 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2959 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2960 int base
, int offset
)
2963 tcg_gen_movi_tl(addr
, offset
);
2964 } else if (offset
== 0) {
2965 gen_load_gpr(addr
, base
);
2967 tcg_gen_movi_tl(addr
, offset
);
2968 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2972 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2974 target_ulong pc
= ctx
->base
.pc_next
;
2976 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2977 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2982 pc
&= ~(target_ulong
)3;
2987 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2988 int rt
, int base
, int offset
)
2991 int mem_idx
= ctx
->mem_idx
;
2993 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2994 /* Loongson CPU uses a load to zero register for prefetch.
2995 We emulate it as a NOP. On other CPU we must perform the
2996 actual memory access. */
3000 t0
= tcg_temp_new();
3001 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3004 #if defined(TARGET_MIPS64)
3006 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3007 ctx
->default_tcg_memop_mask
);
3008 gen_store_gpr(t0
, rt
);
3011 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3012 ctx
->default_tcg_memop_mask
);
3013 gen_store_gpr(t0
, rt
);
3017 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3018 gen_store_gpr(t0
, rt
);
3021 t1
= tcg_temp_new();
3022 /* Do a byte access to possibly trigger a page
3023 fault with the unaligned address. */
3024 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3025 tcg_gen_andi_tl(t1
, t0
, 7);
3026 #ifndef TARGET_WORDS_BIGENDIAN
3027 tcg_gen_xori_tl(t1
, t1
, 7);
3029 tcg_gen_shli_tl(t1
, t1
, 3);
3030 tcg_gen_andi_tl(t0
, t0
, ~7);
3031 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3032 tcg_gen_shl_tl(t0
, t0
, t1
);
3033 t2
= tcg_const_tl(-1);
3034 tcg_gen_shl_tl(t2
, t2
, t1
);
3035 gen_load_gpr(t1
, rt
);
3036 tcg_gen_andc_tl(t1
, t1
, t2
);
3038 tcg_gen_or_tl(t0
, t0
, t1
);
3040 gen_store_gpr(t0
, rt
);
3043 t1
= tcg_temp_new();
3044 /* Do a byte access to possibly trigger a page
3045 fault with the unaligned address. */
3046 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3047 tcg_gen_andi_tl(t1
, t0
, 7);
3048 #ifdef TARGET_WORDS_BIGENDIAN
3049 tcg_gen_xori_tl(t1
, t1
, 7);
3051 tcg_gen_shli_tl(t1
, t1
, 3);
3052 tcg_gen_andi_tl(t0
, t0
, ~7);
3053 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3054 tcg_gen_shr_tl(t0
, t0
, t1
);
3055 tcg_gen_xori_tl(t1
, t1
, 63);
3056 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3057 tcg_gen_shl_tl(t2
, t2
, t1
);
3058 gen_load_gpr(t1
, rt
);
3059 tcg_gen_and_tl(t1
, t1
, t2
);
3061 tcg_gen_or_tl(t0
, t0
, t1
);
3063 gen_store_gpr(t0
, rt
);
3066 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3067 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3069 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3070 gen_store_gpr(t0
, rt
);
3074 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3075 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3077 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3078 gen_store_gpr(t0
, rt
);
3081 mem_idx
= MIPS_HFLAG_UM
;
3084 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3085 ctx
->default_tcg_memop_mask
);
3086 gen_store_gpr(t0
, rt
);
3089 mem_idx
= MIPS_HFLAG_UM
;
3092 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3093 ctx
->default_tcg_memop_mask
);
3094 gen_store_gpr(t0
, rt
);
3097 mem_idx
= MIPS_HFLAG_UM
;
3100 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3101 ctx
->default_tcg_memop_mask
);
3102 gen_store_gpr(t0
, rt
);
3105 mem_idx
= MIPS_HFLAG_UM
;
3108 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3109 gen_store_gpr(t0
, rt
);
3112 mem_idx
= MIPS_HFLAG_UM
;
3115 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3116 gen_store_gpr(t0
, rt
);
3119 mem_idx
= MIPS_HFLAG_UM
;
3122 t1
= tcg_temp_new();
3123 /* Do a byte access to possibly trigger a page
3124 fault with the unaligned address. */
3125 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3126 tcg_gen_andi_tl(t1
, t0
, 3);
3127 #ifndef TARGET_WORDS_BIGENDIAN
3128 tcg_gen_xori_tl(t1
, t1
, 3);
3130 tcg_gen_shli_tl(t1
, t1
, 3);
3131 tcg_gen_andi_tl(t0
, t0
, ~3);
3132 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3133 tcg_gen_shl_tl(t0
, t0
, t1
);
3134 t2
= tcg_const_tl(-1);
3135 tcg_gen_shl_tl(t2
, t2
, t1
);
3136 gen_load_gpr(t1
, rt
);
3137 tcg_gen_andc_tl(t1
, t1
, t2
);
3139 tcg_gen_or_tl(t0
, t0
, t1
);
3141 tcg_gen_ext32s_tl(t0
, t0
);
3142 gen_store_gpr(t0
, rt
);
3145 mem_idx
= MIPS_HFLAG_UM
;
3148 t1
= tcg_temp_new();
3149 /* Do a byte access to possibly trigger a page
3150 fault with the unaligned address. */
3151 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3152 tcg_gen_andi_tl(t1
, t0
, 3);
3153 #ifdef TARGET_WORDS_BIGENDIAN
3154 tcg_gen_xori_tl(t1
, t1
, 3);
3156 tcg_gen_shli_tl(t1
, t1
, 3);
3157 tcg_gen_andi_tl(t0
, t0
, ~3);
3158 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3159 tcg_gen_shr_tl(t0
, t0
, t1
);
3160 tcg_gen_xori_tl(t1
, t1
, 31);
3161 t2
= tcg_const_tl(0xfffffffeull
);
3162 tcg_gen_shl_tl(t2
, t2
, t1
);
3163 gen_load_gpr(t1
, rt
);
3164 tcg_gen_and_tl(t1
, t1
, t2
);
3166 tcg_gen_or_tl(t0
, t0
, t1
);
3168 tcg_gen_ext32s_tl(t0
, t0
);
3169 gen_store_gpr(t0
, rt
);
3172 mem_idx
= MIPS_HFLAG_UM
;
3176 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3177 gen_store_gpr(t0
, rt
);
3183 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3184 uint32_t reg1
, uint32_t reg2
)
3186 TCGv taddr
= tcg_temp_new();
3187 TCGv_i64 tval
= tcg_temp_new_i64();
3188 TCGv tmp1
= tcg_temp_new();
3189 TCGv tmp2
= tcg_temp_new();
3191 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3192 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3193 #ifdef TARGET_WORDS_BIGENDIAN
3194 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3196 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3198 gen_store_gpr(tmp1
, reg1
);
3199 tcg_temp_free(tmp1
);
3200 gen_store_gpr(tmp2
, reg2
);
3201 tcg_temp_free(tmp2
);
3202 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3203 tcg_temp_free_i64(tval
);
3204 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3205 tcg_temp_free(taddr
);
3209 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3210 int base
, int offset
)
3212 TCGv t0
= tcg_temp_new();
3213 TCGv t1
= tcg_temp_new();
3214 int mem_idx
= ctx
->mem_idx
;
3216 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3217 gen_load_gpr(t1
, rt
);
3219 #if defined(TARGET_MIPS64)
3221 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3222 ctx
->default_tcg_memop_mask
);
3225 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3228 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3232 mem_idx
= MIPS_HFLAG_UM
;
3235 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3236 ctx
->default_tcg_memop_mask
);
3239 mem_idx
= MIPS_HFLAG_UM
;
3242 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3243 ctx
->default_tcg_memop_mask
);
3246 mem_idx
= MIPS_HFLAG_UM
;
3249 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3252 mem_idx
= MIPS_HFLAG_UM
;
3255 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3258 mem_idx
= MIPS_HFLAG_UM
;
3261 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3269 /* Store conditional */
3270 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3271 int base
, int16_t offset
)
3274 int mem_idx
= ctx
->mem_idx
;
3276 #ifdef CONFIG_USER_ONLY
3277 t0
= tcg_temp_local_new();
3278 t1
= tcg_temp_local_new();
3280 t0
= tcg_temp_new();
3281 t1
= tcg_temp_new();
3283 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3284 gen_load_gpr(t1
, rt
);
3286 #if defined(TARGET_MIPS64)
3289 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3293 mem_idx
= MIPS_HFLAG_UM
;
3297 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3304 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3305 uint32_t reg1
, uint32_t reg2
)
3307 TCGv taddr
= tcg_temp_local_new();
3308 TCGv lladdr
= tcg_temp_local_new();
3309 TCGv_i64 tval
= tcg_temp_new_i64();
3310 TCGv_i64 llval
= tcg_temp_new_i64();
3311 TCGv_i64 val
= tcg_temp_new_i64();
3312 TCGv tmp1
= tcg_temp_new();
3313 TCGv tmp2
= tcg_temp_new();
3314 TCGLabel
*lab_fail
= gen_new_label();
3315 TCGLabel
*lab_done
= gen_new_label();
3317 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3319 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3320 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3322 gen_load_gpr(tmp1
, reg1
);
3323 gen_load_gpr(tmp2
, reg2
);
3325 #ifdef TARGET_WORDS_BIGENDIAN
3326 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3328 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3331 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3332 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3333 ctx
->mem_idx
, MO_64
);
3335 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3337 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3339 gen_set_label(lab_fail
);
3342 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3344 gen_set_label(lab_done
);
3345 tcg_gen_movi_tl(lladdr
, -1);
3346 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3349 /* Load and store */
3350 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3353 /* Don't do NOP if destination is zero: we must perform the actual
3358 TCGv_i32 fp0
= tcg_temp_new_i32();
3359 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3360 ctx
->default_tcg_memop_mask
);
3361 gen_store_fpr32(ctx
, fp0
, ft
);
3362 tcg_temp_free_i32(fp0
);
3367 TCGv_i32 fp0
= tcg_temp_new_i32();
3368 gen_load_fpr32(ctx
, fp0
, ft
);
3369 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3370 ctx
->default_tcg_memop_mask
);
3371 tcg_temp_free_i32(fp0
);
3376 TCGv_i64 fp0
= tcg_temp_new_i64();
3377 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3378 ctx
->default_tcg_memop_mask
);
3379 gen_store_fpr64(ctx
, fp0
, ft
);
3380 tcg_temp_free_i64(fp0
);
3385 TCGv_i64 fp0
= tcg_temp_new_i64();
3386 gen_load_fpr64(ctx
, fp0
, ft
);
3387 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3388 ctx
->default_tcg_memop_mask
);
3389 tcg_temp_free_i64(fp0
);
3393 MIPS_INVAL("flt_ldst");
3394 generate_exception_end(ctx
, EXCP_RI
);
3399 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3400 int rs
, int16_t imm
)
3402 TCGv t0
= tcg_temp_new();
3404 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3405 check_cp1_enabled(ctx
);
3409 check_insn(ctx
, ISA_MIPS2
);
3412 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3413 gen_flt_ldst(ctx
, op
, rt
, t0
);
3416 generate_exception_err(ctx
, EXCP_CpU
, 1);
3421 /* Arithmetic with immediate operand */
3422 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3423 int rt
, int rs
, int imm
)
3425 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3427 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3428 /* If no destination, treat it as a NOP.
3429 For addi, we must generate the overflow exception when needed. */
3435 TCGv t0
= tcg_temp_local_new();
3436 TCGv t1
= tcg_temp_new();
3437 TCGv t2
= tcg_temp_new();
3438 TCGLabel
*l1
= gen_new_label();
3440 gen_load_gpr(t1
, rs
);
3441 tcg_gen_addi_tl(t0
, t1
, uimm
);
3442 tcg_gen_ext32s_tl(t0
, t0
);
3444 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3445 tcg_gen_xori_tl(t2
, t0
, uimm
);
3446 tcg_gen_and_tl(t1
, t1
, t2
);
3448 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3450 /* operands of same sign, result different sign */
3451 generate_exception(ctx
, EXCP_OVERFLOW
);
3453 tcg_gen_ext32s_tl(t0
, t0
);
3454 gen_store_gpr(t0
, rt
);
3460 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3461 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3463 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3466 #if defined(TARGET_MIPS64)
3469 TCGv t0
= tcg_temp_local_new();
3470 TCGv t1
= tcg_temp_new();
3471 TCGv t2
= tcg_temp_new();
3472 TCGLabel
*l1
= gen_new_label();
3474 gen_load_gpr(t1
, rs
);
3475 tcg_gen_addi_tl(t0
, t1
, uimm
);
3477 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3478 tcg_gen_xori_tl(t2
, t0
, uimm
);
3479 tcg_gen_and_tl(t1
, t1
, t2
);
3481 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3483 /* operands of same sign, result different sign */
3484 generate_exception(ctx
, EXCP_OVERFLOW
);
3486 gen_store_gpr(t0
, rt
);
3492 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3494 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3501 /* Logic with immediate operand */
3502 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3503 int rt
, int rs
, int16_t imm
)
3508 /* If no destination, treat it as a NOP. */
3511 uimm
= (uint16_t)imm
;
3514 if (likely(rs
!= 0))
3515 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3517 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3521 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3523 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3526 if (likely(rs
!= 0))
3527 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3529 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3532 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3534 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3535 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3537 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3546 /* Set on less than with immediate operand */
3547 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3548 int rt
, int rs
, int16_t imm
)
3550 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3554 /* If no destination, treat it as a NOP. */
3557 t0
= tcg_temp_new();
3558 gen_load_gpr(t0
, rs
);
3561 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3564 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3570 /* Shifts with immediate operand */
3571 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3572 int rt
, int rs
, int16_t imm
)
3574 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3578 /* If no destination, treat it as a NOP. */
3582 t0
= tcg_temp_new();
3583 gen_load_gpr(t0
, rs
);
3586 tcg_gen_shli_tl(t0
, t0
, uimm
);
3587 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3590 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3594 tcg_gen_ext32u_tl(t0
, t0
);
3595 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3597 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3602 TCGv_i32 t1
= tcg_temp_new_i32();
3604 tcg_gen_trunc_tl_i32(t1
, t0
);
3605 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3606 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3607 tcg_temp_free_i32(t1
);
3609 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3612 #if defined(TARGET_MIPS64)
3614 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3617 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3620 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3624 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3626 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3630 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3633 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3636 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3639 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3647 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3648 int rd
, int rs
, int rt
)
3650 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3651 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3652 /* If no destination, treat it as a NOP.
3653 For add & sub, we must generate the overflow exception when needed. */
3660 TCGv t0
= tcg_temp_local_new();
3661 TCGv t1
= tcg_temp_new();
3662 TCGv t2
= tcg_temp_new();
3663 TCGLabel
*l1
= gen_new_label();
3665 gen_load_gpr(t1
, rs
);
3666 gen_load_gpr(t2
, rt
);
3667 tcg_gen_add_tl(t0
, t1
, t2
);
3668 tcg_gen_ext32s_tl(t0
, t0
);
3669 tcg_gen_xor_tl(t1
, t1
, t2
);
3670 tcg_gen_xor_tl(t2
, t0
, t2
);
3671 tcg_gen_andc_tl(t1
, t2
, t1
);
3673 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3675 /* operands of same sign, result different sign */
3676 generate_exception(ctx
, EXCP_OVERFLOW
);
3678 gen_store_gpr(t0
, rd
);
3683 if (rs
!= 0 && rt
!= 0) {
3684 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3685 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3686 } else if (rs
== 0 && rt
!= 0) {
3687 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3688 } else if (rs
!= 0 && rt
== 0) {
3689 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3691 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3696 TCGv t0
= tcg_temp_local_new();
3697 TCGv t1
= tcg_temp_new();
3698 TCGv t2
= tcg_temp_new();
3699 TCGLabel
*l1
= gen_new_label();
3701 gen_load_gpr(t1
, rs
);
3702 gen_load_gpr(t2
, rt
);
3703 tcg_gen_sub_tl(t0
, t1
, t2
);
3704 tcg_gen_ext32s_tl(t0
, t0
);
3705 tcg_gen_xor_tl(t2
, t1
, t2
);
3706 tcg_gen_xor_tl(t1
, t0
, t1
);
3707 tcg_gen_and_tl(t1
, t1
, t2
);
3709 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3711 /* operands of different sign, first operand and result different sign */
3712 generate_exception(ctx
, EXCP_OVERFLOW
);
3714 gen_store_gpr(t0
, rd
);
3719 if (rs
!= 0 && rt
!= 0) {
3720 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3721 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3722 } else if (rs
== 0 && rt
!= 0) {
3723 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3724 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3725 } else if (rs
!= 0 && rt
== 0) {
3726 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3728 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3731 #if defined(TARGET_MIPS64)
3734 TCGv t0
= tcg_temp_local_new();
3735 TCGv t1
= tcg_temp_new();
3736 TCGv t2
= tcg_temp_new();
3737 TCGLabel
*l1
= gen_new_label();
3739 gen_load_gpr(t1
, rs
);
3740 gen_load_gpr(t2
, rt
);
3741 tcg_gen_add_tl(t0
, t1
, t2
);
3742 tcg_gen_xor_tl(t1
, t1
, t2
);
3743 tcg_gen_xor_tl(t2
, t0
, t2
);
3744 tcg_gen_andc_tl(t1
, t2
, t1
);
3746 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3748 /* operands of same sign, result different sign */
3749 generate_exception(ctx
, EXCP_OVERFLOW
);
3751 gen_store_gpr(t0
, rd
);
3756 if (rs
!= 0 && rt
!= 0) {
3757 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3758 } else if (rs
== 0 && rt
!= 0) {
3759 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3760 } else if (rs
!= 0 && rt
== 0) {
3761 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3763 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3768 TCGv t0
= tcg_temp_local_new();
3769 TCGv t1
= tcg_temp_new();
3770 TCGv t2
= tcg_temp_new();
3771 TCGLabel
*l1
= gen_new_label();
3773 gen_load_gpr(t1
, rs
);
3774 gen_load_gpr(t2
, rt
);
3775 tcg_gen_sub_tl(t0
, t1
, t2
);
3776 tcg_gen_xor_tl(t2
, t1
, t2
);
3777 tcg_gen_xor_tl(t1
, t0
, t1
);
3778 tcg_gen_and_tl(t1
, t1
, t2
);
3780 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3782 /* operands of different sign, first operand and result different sign */
3783 generate_exception(ctx
, EXCP_OVERFLOW
);
3785 gen_store_gpr(t0
, rd
);
3790 if (rs
!= 0 && rt
!= 0) {
3791 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3792 } else if (rs
== 0 && rt
!= 0) {
3793 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3794 } else if (rs
!= 0 && rt
== 0) {
3795 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3797 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3802 if (likely(rs
!= 0 && rt
!= 0)) {
3803 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3804 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3812 /* Conditional move */
3813 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
3814 int rd
, int rs
, int rt
)
3819 /* If no destination, treat it as a NOP. */
3823 t0
= tcg_temp_new();
3824 gen_load_gpr(t0
, rt
);
3825 t1
= tcg_const_tl(0);
3826 t2
= tcg_temp_new();
3827 gen_load_gpr(t2
, rs
);
3830 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3833 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3836 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3839 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3848 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
3849 int rd
, int rs
, int rt
)
3852 /* If no destination, treat it as a NOP. */
3858 if (likely(rs
!= 0 && rt
!= 0)) {
3859 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3861 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3865 if (rs
!= 0 && rt
!= 0) {
3866 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3867 } else if (rs
== 0 && rt
!= 0) {
3868 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3869 } else if (rs
!= 0 && rt
== 0) {
3870 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3872 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
3876 if (likely(rs
!= 0 && rt
!= 0)) {
3877 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3878 } else if (rs
== 0 && rt
!= 0) {
3879 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3880 } else if (rs
!= 0 && rt
== 0) {
3881 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3883 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3887 if (likely(rs
!= 0 && rt
!= 0)) {
3888 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3889 } else if (rs
== 0 && rt
!= 0) {
3890 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3891 } else if (rs
!= 0 && rt
== 0) {
3892 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3894 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3900 /* Set on lower than */
3901 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
3902 int rd
, int rs
, int rt
)
3907 /* If no destination, treat it as a NOP. */
3911 t0
= tcg_temp_new();
3912 t1
= tcg_temp_new();
3913 gen_load_gpr(t0
, rs
);
3914 gen_load_gpr(t1
, rt
);
3917 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3920 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3928 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3929 int rd
, int rs
, int rt
)
3934 /* If no destination, treat it as a NOP.
3935 For add & sub, we must generate the overflow exception when needed. */
3939 t0
= tcg_temp_new();
3940 t1
= tcg_temp_new();
3941 gen_load_gpr(t0
, rs
);
3942 gen_load_gpr(t1
, rt
);
3945 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3946 tcg_gen_shl_tl(t0
, t1
, t0
);
3947 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3950 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3951 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3954 tcg_gen_ext32u_tl(t1
, t1
);
3955 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3956 tcg_gen_shr_tl(t0
, t1
, t0
);
3957 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3961 TCGv_i32 t2
= tcg_temp_new_i32();
3962 TCGv_i32 t3
= tcg_temp_new_i32();
3964 tcg_gen_trunc_tl_i32(t2
, t0
);
3965 tcg_gen_trunc_tl_i32(t3
, t1
);
3966 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3967 tcg_gen_rotr_i32(t2
, t3
, t2
);
3968 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3969 tcg_temp_free_i32(t2
);
3970 tcg_temp_free_i32(t3
);
3973 #if defined(TARGET_MIPS64)
3975 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3976 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3979 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3980 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3983 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3984 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3987 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3988 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3996 /* Arithmetic on HI/LO registers */
3997 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3999 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4010 #if defined(TARGET_MIPS64)
4012 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4016 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4020 #if defined(TARGET_MIPS64)
4022 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4026 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4031 #if defined(TARGET_MIPS64)
4033 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4037 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4040 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4045 #if defined(TARGET_MIPS64)
4047 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4051 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4054 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4060 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4063 TCGv t0
= tcg_const_tl(addr
);
4064 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4065 gen_store_gpr(t0
, reg
);
4069 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4075 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4078 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4079 addr
= addr_add(ctx
, pc
, offset
);
4080 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4084 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4085 addr
= addr_add(ctx
, pc
, offset
);
4086 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4088 #if defined(TARGET_MIPS64)
4091 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4092 addr
= addr_add(ctx
, pc
, offset
);
4093 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4097 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4100 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4101 addr
= addr_add(ctx
, pc
, offset
);
4102 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4107 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4108 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4109 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4112 #if defined(TARGET_MIPS64)
4113 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4114 case R6_OPC_LDPC
+ (1 << 16):
4115 case R6_OPC_LDPC
+ (2 << 16):
4116 case R6_OPC_LDPC
+ (3 << 16):
4118 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4119 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4120 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4124 MIPS_INVAL("OPC_PCREL");
4125 generate_exception_end(ctx
, EXCP_RI
);
4132 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4141 t0
= tcg_temp_new();
4142 t1
= tcg_temp_new();
4144 gen_load_gpr(t0
, rs
);
4145 gen_load_gpr(t1
, rt
);
4150 TCGv t2
= tcg_temp_new();
4151 TCGv t3
= tcg_temp_new();
4152 tcg_gen_ext32s_tl(t0
, t0
);
4153 tcg_gen_ext32s_tl(t1
, t1
);
4154 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4155 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4156 tcg_gen_and_tl(t2
, t2
, t3
);
4157 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4158 tcg_gen_or_tl(t2
, t2
, t3
);
4159 tcg_gen_movi_tl(t3
, 0);
4160 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4161 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4162 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4169 TCGv t2
= tcg_temp_new();
4170 TCGv t3
= tcg_temp_new();
4171 tcg_gen_ext32s_tl(t0
, t0
);
4172 tcg_gen_ext32s_tl(t1
, t1
);
4173 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4174 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4175 tcg_gen_and_tl(t2
, t2
, t3
);
4176 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4177 tcg_gen_or_tl(t2
, t2
, t3
);
4178 tcg_gen_movi_tl(t3
, 0);
4179 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4180 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4181 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4188 TCGv t2
= tcg_const_tl(0);
4189 TCGv t3
= tcg_const_tl(1);
4190 tcg_gen_ext32u_tl(t0
, t0
);
4191 tcg_gen_ext32u_tl(t1
, t1
);
4192 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4193 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4194 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4201 TCGv t2
= tcg_const_tl(0);
4202 TCGv t3
= tcg_const_tl(1);
4203 tcg_gen_ext32u_tl(t0
, t0
);
4204 tcg_gen_ext32u_tl(t1
, t1
);
4205 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4206 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4207 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4214 TCGv_i32 t2
= tcg_temp_new_i32();
4215 TCGv_i32 t3
= tcg_temp_new_i32();
4216 tcg_gen_trunc_tl_i32(t2
, t0
);
4217 tcg_gen_trunc_tl_i32(t3
, t1
);
4218 tcg_gen_mul_i32(t2
, t2
, t3
);
4219 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4220 tcg_temp_free_i32(t2
);
4221 tcg_temp_free_i32(t3
);
4226 TCGv_i32 t2
= tcg_temp_new_i32();
4227 TCGv_i32 t3
= tcg_temp_new_i32();
4228 tcg_gen_trunc_tl_i32(t2
, t0
);
4229 tcg_gen_trunc_tl_i32(t3
, t1
);
4230 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4231 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4232 tcg_temp_free_i32(t2
);
4233 tcg_temp_free_i32(t3
);
4238 TCGv_i32 t2
= tcg_temp_new_i32();
4239 TCGv_i32 t3
= tcg_temp_new_i32();
4240 tcg_gen_trunc_tl_i32(t2
, t0
);
4241 tcg_gen_trunc_tl_i32(t3
, t1
);
4242 tcg_gen_mul_i32(t2
, t2
, t3
);
4243 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4244 tcg_temp_free_i32(t2
);
4245 tcg_temp_free_i32(t3
);
4250 TCGv_i32 t2
= tcg_temp_new_i32();
4251 TCGv_i32 t3
= tcg_temp_new_i32();
4252 tcg_gen_trunc_tl_i32(t2
, t0
);
4253 tcg_gen_trunc_tl_i32(t3
, t1
);
4254 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4255 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4256 tcg_temp_free_i32(t2
);
4257 tcg_temp_free_i32(t3
);
4260 #if defined(TARGET_MIPS64)
4263 TCGv t2
= tcg_temp_new();
4264 TCGv t3
= tcg_temp_new();
4265 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4266 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4267 tcg_gen_and_tl(t2
, t2
, t3
);
4268 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4269 tcg_gen_or_tl(t2
, t2
, t3
);
4270 tcg_gen_movi_tl(t3
, 0);
4271 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4272 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4279 TCGv t2
= tcg_temp_new();
4280 TCGv t3
= tcg_temp_new();
4281 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4282 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4283 tcg_gen_and_tl(t2
, t2
, t3
);
4284 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4285 tcg_gen_or_tl(t2
, t2
, t3
);
4286 tcg_gen_movi_tl(t3
, 0);
4287 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4288 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4295 TCGv t2
= tcg_const_tl(0);
4296 TCGv t3
= tcg_const_tl(1);
4297 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4298 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4305 TCGv t2
= tcg_const_tl(0);
4306 TCGv t3
= tcg_const_tl(1);
4307 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4308 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4314 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4318 TCGv t2
= tcg_temp_new();
4319 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4324 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4328 TCGv t2
= tcg_temp_new();
4329 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4335 MIPS_INVAL("r6 mul/div");
4336 generate_exception_end(ctx
, EXCP_RI
);
4344 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4345 int acc
, int rs
, int rt
)
4349 t0
= tcg_temp_new();
4350 t1
= tcg_temp_new();
4352 gen_load_gpr(t0
, rs
);
4353 gen_load_gpr(t1
, rt
);
4362 TCGv t2
= tcg_temp_new();
4363 TCGv t3
= tcg_temp_new();
4364 tcg_gen_ext32s_tl(t0
, t0
);
4365 tcg_gen_ext32s_tl(t1
, t1
);
4366 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4367 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4368 tcg_gen_and_tl(t2
, t2
, t3
);
4369 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4370 tcg_gen_or_tl(t2
, t2
, t3
);
4371 tcg_gen_movi_tl(t3
, 0);
4372 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4373 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4374 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4375 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4376 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4383 TCGv t2
= tcg_const_tl(0);
4384 TCGv t3
= tcg_const_tl(1);
4385 tcg_gen_ext32u_tl(t0
, t0
);
4386 tcg_gen_ext32u_tl(t1
, t1
);
4387 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4388 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4389 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4390 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4391 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4398 TCGv_i32 t2
= tcg_temp_new_i32();
4399 TCGv_i32 t3
= tcg_temp_new_i32();
4400 tcg_gen_trunc_tl_i32(t2
, t0
);
4401 tcg_gen_trunc_tl_i32(t3
, t1
);
4402 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4403 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4404 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4405 tcg_temp_free_i32(t2
);
4406 tcg_temp_free_i32(t3
);
4411 TCGv_i32 t2
= tcg_temp_new_i32();
4412 TCGv_i32 t3
= tcg_temp_new_i32();
4413 tcg_gen_trunc_tl_i32(t2
, t0
);
4414 tcg_gen_trunc_tl_i32(t3
, t1
);
4415 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4416 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4417 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4418 tcg_temp_free_i32(t2
);
4419 tcg_temp_free_i32(t3
);
4422 #if defined(TARGET_MIPS64)
4425 TCGv t2
= tcg_temp_new();
4426 TCGv t3
= tcg_temp_new();
4427 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4428 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4429 tcg_gen_and_tl(t2
, t2
, t3
);
4430 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4431 tcg_gen_or_tl(t2
, t2
, t3
);
4432 tcg_gen_movi_tl(t3
, 0);
4433 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4434 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4435 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4442 TCGv t2
= tcg_const_tl(0);
4443 TCGv t3
= tcg_const_tl(1);
4444 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4445 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4446 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4452 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4455 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4460 TCGv_i64 t2
= tcg_temp_new_i64();
4461 TCGv_i64 t3
= tcg_temp_new_i64();
4463 tcg_gen_ext_tl_i64(t2
, t0
);
4464 tcg_gen_ext_tl_i64(t3
, t1
);
4465 tcg_gen_mul_i64(t2
, t2
, t3
);
4466 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4467 tcg_gen_add_i64(t2
, t2
, t3
);
4468 tcg_temp_free_i64(t3
);
4469 gen_move_low32(cpu_LO
[acc
], t2
);
4470 gen_move_high32(cpu_HI
[acc
], t2
);
4471 tcg_temp_free_i64(t2
);
4476 TCGv_i64 t2
= tcg_temp_new_i64();
4477 TCGv_i64 t3
= tcg_temp_new_i64();
4479 tcg_gen_ext32u_tl(t0
, t0
);
4480 tcg_gen_ext32u_tl(t1
, t1
);
4481 tcg_gen_extu_tl_i64(t2
, t0
);
4482 tcg_gen_extu_tl_i64(t3
, t1
);
4483 tcg_gen_mul_i64(t2
, t2
, t3
);
4484 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4485 tcg_gen_add_i64(t2
, t2
, t3
);
4486 tcg_temp_free_i64(t3
);
4487 gen_move_low32(cpu_LO
[acc
], t2
);
4488 gen_move_high32(cpu_HI
[acc
], t2
);
4489 tcg_temp_free_i64(t2
);
4494 TCGv_i64 t2
= tcg_temp_new_i64();
4495 TCGv_i64 t3
= tcg_temp_new_i64();
4497 tcg_gen_ext_tl_i64(t2
, t0
);
4498 tcg_gen_ext_tl_i64(t3
, t1
);
4499 tcg_gen_mul_i64(t2
, t2
, t3
);
4500 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4501 tcg_gen_sub_i64(t2
, t3
, t2
);
4502 tcg_temp_free_i64(t3
);
4503 gen_move_low32(cpu_LO
[acc
], t2
);
4504 gen_move_high32(cpu_HI
[acc
], t2
);
4505 tcg_temp_free_i64(t2
);
4510 TCGv_i64 t2
= tcg_temp_new_i64();
4511 TCGv_i64 t3
= tcg_temp_new_i64();
4513 tcg_gen_ext32u_tl(t0
, t0
);
4514 tcg_gen_ext32u_tl(t1
, t1
);
4515 tcg_gen_extu_tl_i64(t2
, t0
);
4516 tcg_gen_extu_tl_i64(t3
, t1
);
4517 tcg_gen_mul_i64(t2
, t2
, t3
);
4518 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4519 tcg_gen_sub_i64(t2
, t3
, t2
);
4520 tcg_temp_free_i64(t3
);
4521 gen_move_low32(cpu_LO
[acc
], t2
);
4522 gen_move_high32(cpu_HI
[acc
], t2
);
4523 tcg_temp_free_i64(t2
);
4527 MIPS_INVAL("mul/div");
4528 generate_exception_end(ctx
, EXCP_RI
);
4536 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
4537 int rd
, int rs
, int rt
)
4539 TCGv t0
= tcg_temp_new();
4540 TCGv t1
= tcg_temp_new();
4542 gen_load_gpr(t0
, rs
);
4543 gen_load_gpr(t1
, rt
);
4546 case OPC_VR54XX_MULS
:
4547 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4549 case OPC_VR54XX_MULSU
:
4550 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4552 case OPC_VR54XX_MACC
:
4553 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4555 case OPC_VR54XX_MACCU
:
4556 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4558 case OPC_VR54XX_MSAC
:
4559 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4561 case OPC_VR54XX_MSACU
:
4562 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4564 case OPC_VR54XX_MULHI
:
4565 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4567 case OPC_VR54XX_MULHIU
:
4568 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4570 case OPC_VR54XX_MULSHI
:
4571 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4573 case OPC_VR54XX_MULSHIU
:
4574 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4576 case OPC_VR54XX_MACCHI
:
4577 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4579 case OPC_VR54XX_MACCHIU
:
4580 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4582 case OPC_VR54XX_MSACHI
:
4583 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4585 case OPC_VR54XX_MSACHIU
:
4586 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4589 MIPS_INVAL("mul vr54xx");
4590 generate_exception_end(ctx
, EXCP_RI
);
4593 gen_store_gpr(t0
, rd
);
4600 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
4610 gen_load_gpr(t0
, rs
);
4615 #if defined(TARGET_MIPS64)
4619 tcg_gen_not_tl(t0
, t0
);
4628 tcg_gen_ext32u_tl(t0
, t0
);
4629 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4630 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4632 #if defined(TARGET_MIPS64)
4637 tcg_gen_clzi_i64(t0
, t0
, 64);
4643 /* Godson integer instructions */
4644 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
4645 int rd
, int rs
, int rt
)
4657 case OPC_MULTU_G_2E
:
4658 case OPC_MULTU_G_2F
:
4659 #if defined(TARGET_MIPS64)
4660 case OPC_DMULT_G_2E
:
4661 case OPC_DMULT_G_2F
:
4662 case OPC_DMULTU_G_2E
:
4663 case OPC_DMULTU_G_2F
:
4665 t0
= tcg_temp_new();
4666 t1
= tcg_temp_new();
4669 t0
= tcg_temp_local_new();
4670 t1
= tcg_temp_local_new();
4674 gen_load_gpr(t0
, rs
);
4675 gen_load_gpr(t1
, rt
);
4680 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4681 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4683 case OPC_MULTU_G_2E
:
4684 case OPC_MULTU_G_2F
:
4685 tcg_gen_ext32u_tl(t0
, t0
);
4686 tcg_gen_ext32u_tl(t1
, t1
);
4687 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4688 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4693 TCGLabel
*l1
= gen_new_label();
4694 TCGLabel
*l2
= gen_new_label();
4695 TCGLabel
*l3
= gen_new_label();
4696 tcg_gen_ext32s_tl(t0
, t0
);
4697 tcg_gen_ext32s_tl(t1
, t1
);
4698 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4699 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4702 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
4703 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
4704 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4707 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4708 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4715 TCGLabel
*l1
= gen_new_label();
4716 TCGLabel
*l2
= gen_new_label();
4717 tcg_gen_ext32u_tl(t0
, t0
);
4718 tcg_gen_ext32u_tl(t1
, t1
);
4719 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4720 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4723 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4724 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4731 TCGLabel
*l1
= gen_new_label();
4732 TCGLabel
*l2
= gen_new_label();
4733 TCGLabel
*l3
= gen_new_label();
4734 tcg_gen_ext32u_tl(t0
, t0
);
4735 tcg_gen_ext32u_tl(t1
, t1
);
4736 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
4737 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
4738 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
4740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4743 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4751 TCGLabel
*l1
= gen_new_label();
4752 TCGLabel
*l2
= gen_new_label();
4753 tcg_gen_ext32u_tl(t0
, t0
);
4754 tcg_gen_ext32u_tl(t1
, t1
);
4755 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4756 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4759 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4760 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4764 #if defined(TARGET_MIPS64)
4765 case OPC_DMULT_G_2E
:
4766 case OPC_DMULT_G_2F
:
4767 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4769 case OPC_DMULTU_G_2E
:
4770 case OPC_DMULTU_G_2F
:
4771 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4776 TCGLabel
*l1
= gen_new_label();
4777 TCGLabel
*l2
= gen_new_label();
4778 TCGLabel
*l3
= gen_new_label();
4779 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4783 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
4784 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
4785 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4788 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4792 case OPC_DDIVU_G_2E
:
4793 case OPC_DDIVU_G_2F
:
4795 TCGLabel
*l1
= gen_new_label();
4796 TCGLabel
*l2
= gen_new_label();
4797 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4798 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4801 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4808 TCGLabel
*l1
= gen_new_label();
4809 TCGLabel
*l2
= gen_new_label();
4810 TCGLabel
*l3
= gen_new_label();
4811 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
4812 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
4813 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
4815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4818 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4822 case OPC_DMODU_G_2E
:
4823 case OPC_DMODU_G_2F
:
4825 TCGLabel
*l1
= gen_new_label();
4826 TCGLabel
*l2
= gen_new_label();
4827 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4828 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4831 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4842 /* Loongson multimedia instructions */
4843 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
4845 uint32_t opc
, shift_max
;
4848 opc
= MASK_LMI(ctx
->opcode
);
4854 t0
= tcg_temp_local_new_i64();
4855 t1
= tcg_temp_local_new_i64();
4858 t0
= tcg_temp_new_i64();
4859 t1
= tcg_temp_new_i64();
4863 check_cp1_enabled(ctx
);
4864 gen_load_fpr64(ctx
, t0
, rs
);
4865 gen_load_fpr64(ctx
, t1
, rt
);
4867 #define LMI_HELPER(UP, LO) \
4868 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
4869 #define LMI_HELPER_1(UP, LO) \
4870 case OPC_##UP: gen_helper_##LO(t0, t0); break
4871 #define LMI_DIRECT(UP, LO, OP) \
4872 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
4875 LMI_HELPER(PADDSH
, paddsh
);
4876 LMI_HELPER(PADDUSH
, paddush
);
4877 LMI_HELPER(PADDH
, paddh
);
4878 LMI_HELPER(PADDW
, paddw
);
4879 LMI_HELPER(PADDSB
, paddsb
);
4880 LMI_HELPER(PADDUSB
, paddusb
);
4881 LMI_HELPER(PADDB
, paddb
);
4883 LMI_HELPER(PSUBSH
, psubsh
);
4884 LMI_HELPER(PSUBUSH
, psubush
);
4885 LMI_HELPER(PSUBH
, psubh
);
4886 LMI_HELPER(PSUBW
, psubw
);
4887 LMI_HELPER(PSUBSB
, psubsb
);
4888 LMI_HELPER(PSUBUSB
, psubusb
);
4889 LMI_HELPER(PSUBB
, psubb
);
4891 LMI_HELPER(PSHUFH
, pshufh
);
4892 LMI_HELPER(PACKSSWH
, packsswh
);
4893 LMI_HELPER(PACKSSHB
, packsshb
);
4894 LMI_HELPER(PACKUSHB
, packushb
);
4896 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
4897 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
4898 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
4899 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
4900 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
4901 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
4903 LMI_HELPER(PAVGH
, pavgh
);
4904 LMI_HELPER(PAVGB
, pavgb
);
4905 LMI_HELPER(PMAXSH
, pmaxsh
);
4906 LMI_HELPER(PMINSH
, pminsh
);
4907 LMI_HELPER(PMAXUB
, pmaxub
);
4908 LMI_HELPER(PMINUB
, pminub
);
4910 LMI_HELPER(PCMPEQW
, pcmpeqw
);
4911 LMI_HELPER(PCMPGTW
, pcmpgtw
);
4912 LMI_HELPER(PCMPEQH
, pcmpeqh
);
4913 LMI_HELPER(PCMPGTH
, pcmpgth
);
4914 LMI_HELPER(PCMPEQB
, pcmpeqb
);
4915 LMI_HELPER(PCMPGTB
, pcmpgtb
);
4917 LMI_HELPER(PSLLW
, psllw
);
4918 LMI_HELPER(PSLLH
, psllh
);
4919 LMI_HELPER(PSRLW
, psrlw
);
4920 LMI_HELPER(PSRLH
, psrlh
);
4921 LMI_HELPER(PSRAW
, psraw
);
4922 LMI_HELPER(PSRAH
, psrah
);
4924 LMI_HELPER(PMULLH
, pmullh
);
4925 LMI_HELPER(PMULHH
, pmulhh
);
4926 LMI_HELPER(PMULHUH
, pmulhuh
);
4927 LMI_HELPER(PMADDHW
, pmaddhw
);
4929 LMI_HELPER(PASUBUB
, pasubub
);
4930 LMI_HELPER_1(BIADD
, biadd
);
4931 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
4933 LMI_DIRECT(PADDD
, paddd
, add
);
4934 LMI_DIRECT(PSUBD
, psubd
, sub
);
4935 LMI_DIRECT(XOR_CP2
, xor, xor);
4936 LMI_DIRECT(NOR_CP2
, nor
, nor
);
4937 LMI_DIRECT(AND_CP2
, and, and);
4938 LMI_DIRECT(OR_CP2
, or, or);
4941 tcg_gen_andc_i64(t0
, t1
, t0
);
4945 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4948 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4951 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4954 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4958 tcg_gen_andi_i64(t1
, t1
, 3);
4959 tcg_gen_shli_i64(t1
, t1
, 4);
4960 tcg_gen_shr_i64(t0
, t0
, t1
);
4961 tcg_gen_ext16u_i64(t0
, t0
);
4965 tcg_gen_add_i64(t0
, t0
, t1
);
4966 tcg_gen_ext32s_i64(t0
, t0
);
4969 tcg_gen_sub_i64(t0
, t0
, t1
);
4970 tcg_gen_ext32s_i64(t0
, t0
);
4992 /* Make sure shift count isn't TCG undefined behaviour. */
4993 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4998 tcg_gen_shl_i64(t0
, t0
, t1
);
5002 /* Since SRA is UndefinedResult without sign-extended inputs,
5003 we can treat SRA and DSRA the same. */
5004 tcg_gen_sar_i64(t0
, t0
, t1
);
5007 /* We want to shift in zeros for SRL; zero-extend first. */
5008 tcg_gen_ext32u_i64(t0
, t0
);
5011 tcg_gen_shr_i64(t0
, t0
, t1
);
5015 if (shift_max
== 32) {
5016 tcg_gen_ext32s_i64(t0
, t0
);
5019 /* Shifts larger than MAX produce zero. */
5020 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5021 tcg_gen_neg_i64(t1
, t1
);
5022 tcg_gen_and_i64(t0
, t0
, t1
);
5028 TCGv_i64 t2
= tcg_temp_new_i64();
5029 TCGLabel
*lab
= gen_new_label();
5031 tcg_gen_mov_i64(t2
, t0
);
5032 tcg_gen_add_i64(t0
, t1
, t2
);
5033 if (opc
== OPC_ADD_CP2
) {
5034 tcg_gen_ext32s_i64(t0
, t0
);
5036 tcg_gen_xor_i64(t1
, t1
, t2
);
5037 tcg_gen_xor_i64(t2
, t2
, t0
);
5038 tcg_gen_andc_i64(t1
, t2
, t1
);
5039 tcg_temp_free_i64(t2
);
5040 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5041 generate_exception(ctx
, EXCP_OVERFLOW
);
5049 TCGv_i64 t2
= tcg_temp_new_i64();
5050 TCGLabel
*lab
= gen_new_label();
5052 tcg_gen_mov_i64(t2
, t0
);
5053 tcg_gen_sub_i64(t0
, t1
, t2
);
5054 if (opc
== OPC_SUB_CP2
) {
5055 tcg_gen_ext32s_i64(t0
, t0
);
5057 tcg_gen_xor_i64(t1
, t1
, t2
);
5058 tcg_gen_xor_i64(t2
, t2
, t0
);
5059 tcg_gen_and_i64(t1
, t1
, t2
);
5060 tcg_temp_free_i64(t2
);
5061 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5062 generate_exception(ctx
, EXCP_OVERFLOW
);
5068 tcg_gen_ext32u_i64(t0
, t0
);
5069 tcg_gen_ext32u_i64(t1
, t1
);
5070 tcg_gen_mul_i64(t0
, t0
, t1
);
5079 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5080 FD field is the CC field? */
5082 MIPS_INVAL("loongson_cp2");
5083 generate_exception_end(ctx
, EXCP_RI
);
5090 gen_store_fpr64(ctx
, t0
, rd
);
5092 tcg_temp_free_i64(t0
);
5093 tcg_temp_free_i64(t1
);
5097 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5098 int rs
, int rt
, int16_t imm
)
5101 TCGv t0
= tcg_temp_new();
5102 TCGv t1
= tcg_temp_new();
5105 /* Load needed operands */
5113 /* Compare two registers */
5115 gen_load_gpr(t0
, rs
);
5116 gen_load_gpr(t1
, rt
);
5126 /* Compare register to immediate */
5127 if (rs
!= 0 || imm
!= 0) {
5128 gen_load_gpr(t0
, rs
);
5129 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5136 case OPC_TEQ
: /* rs == rs */
5137 case OPC_TEQI
: /* r0 == 0 */
5138 case OPC_TGE
: /* rs >= rs */
5139 case OPC_TGEI
: /* r0 >= 0 */
5140 case OPC_TGEU
: /* rs >= rs unsigned */
5141 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5143 generate_exception_end(ctx
, EXCP_TRAP
);
5145 case OPC_TLT
: /* rs < rs */
5146 case OPC_TLTI
: /* r0 < 0 */
5147 case OPC_TLTU
: /* rs < rs unsigned */
5148 case OPC_TLTIU
: /* r0 < 0 unsigned */
5149 case OPC_TNE
: /* rs != rs */
5150 case OPC_TNEI
: /* r0 != 0 */
5151 /* Never trap: treat as NOP. */
5155 TCGLabel
*l1
= gen_new_label();
5160 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5164 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5168 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5172 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5176 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5180 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5183 generate_exception(ctx
, EXCP_TRAP
);
5190 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5192 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5196 #ifndef CONFIG_USER_ONLY
5197 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5203 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5205 if (use_goto_tb(ctx
, dest
)) {
5208 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5211 if (ctx
->base
.singlestep_enabled
) {
5212 save_cpu_state(ctx
, 0);
5213 gen_helper_raise_exception_debug(cpu_env
);
5215 tcg_gen_lookup_and_goto_ptr();
5219 /* Branches (before delay slot) */
5220 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5222 int rs
, int rt
, int32_t offset
,
5225 target_ulong btgt
= -1;
5227 int bcond_compute
= 0;
5228 TCGv t0
= tcg_temp_new();
5229 TCGv t1
= tcg_temp_new();
5231 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5232 #ifdef MIPS_DEBUG_DISAS
5233 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5234 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5236 generate_exception_end(ctx
, EXCP_RI
);
5240 /* Load needed operands */
5246 /* Compare two registers */
5248 gen_load_gpr(t0
, rs
);
5249 gen_load_gpr(t1
, rt
);
5252 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5266 /* Compare to zero */
5268 gen_load_gpr(t0
, rs
);
5271 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5274 #if defined(TARGET_MIPS64)
5276 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5278 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5281 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5286 /* Jump to immediate */
5287 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5292 /* Jump to register */
5293 if (offset
!= 0 && offset
!= 16) {
5294 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5295 others are reserved. */
5296 MIPS_INVAL("jump hint");
5297 generate_exception_end(ctx
, EXCP_RI
);
5300 gen_load_gpr(btarget
, rs
);
5303 MIPS_INVAL("branch/jump");
5304 generate_exception_end(ctx
, EXCP_RI
);
5307 if (bcond_compute
== 0) {
5308 /* No condition to be computed */
5310 case OPC_BEQ
: /* rx == rx */
5311 case OPC_BEQL
: /* rx == rx likely */
5312 case OPC_BGEZ
: /* 0 >= 0 */
5313 case OPC_BGEZL
: /* 0 >= 0 likely */
5314 case OPC_BLEZ
: /* 0 <= 0 */
5315 case OPC_BLEZL
: /* 0 <= 0 likely */
5317 ctx
->hflags
|= MIPS_HFLAG_B
;
5319 case OPC_BGEZAL
: /* 0 >= 0 */
5320 case OPC_BGEZALL
: /* 0 >= 0 likely */
5321 /* Always take and link */
5323 ctx
->hflags
|= MIPS_HFLAG_B
;
5325 case OPC_BNE
: /* rx != rx */
5326 case OPC_BGTZ
: /* 0 > 0 */
5327 case OPC_BLTZ
: /* 0 < 0 */
5330 case OPC_BLTZAL
: /* 0 < 0 */
5331 /* Handle as an unconditional branch to get correct delay
5334 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5335 ctx
->hflags
|= MIPS_HFLAG_B
;
5337 case OPC_BLTZALL
: /* 0 < 0 likely */
5338 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5339 /* Skip the instruction in the delay slot */
5340 ctx
->base
.pc_next
+= 4;
5342 case OPC_BNEL
: /* rx != rx likely */
5343 case OPC_BGTZL
: /* 0 > 0 likely */
5344 case OPC_BLTZL
: /* 0 < 0 likely */
5345 /* Skip the instruction in the delay slot */
5346 ctx
->base
.pc_next
+= 4;
5349 ctx
->hflags
|= MIPS_HFLAG_B
;
5352 ctx
->hflags
|= MIPS_HFLAG_BX
;
5356 ctx
->hflags
|= MIPS_HFLAG_B
;
5359 ctx
->hflags
|= MIPS_HFLAG_BR
;
5363 ctx
->hflags
|= MIPS_HFLAG_BR
;
5366 MIPS_INVAL("branch/jump");
5367 generate_exception_end(ctx
, EXCP_RI
);
5373 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5376 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5379 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5382 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5385 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5388 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5391 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5395 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5399 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5402 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5405 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5408 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5411 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5414 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5417 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5419 #if defined(TARGET_MIPS64)
5421 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5425 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5428 ctx
->hflags
|= MIPS_HFLAG_BC
;
5431 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5434 ctx
->hflags
|= MIPS_HFLAG_BL
;
5437 MIPS_INVAL("conditional branch/jump");
5438 generate_exception_end(ctx
, EXCP_RI
);
5443 ctx
->btarget
= btgt
;
5445 switch (delayslot_size
) {
5447 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5450 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5455 int post_delay
= insn_bytes
+ delayslot_size
;
5456 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5458 tcg_gen_movi_tl(cpu_gpr
[blink
],
5459 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5463 if (insn_bytes
== 2)
5464 ctx
->hflags
|= MIPS_HFLAG_B16
;
5470 /* nanoMIPS Branches */
5471 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
5473 int rs
, int rt
, int32_t offset
)
5475 target_ulong btgt
= -1;
5476 int bcond_compute
= 0;
5477 TCGv t0
= tcg_temp_new();
5478 TCGv t1
= tcg_temp_new();
5480 /* Load needed operands */
5484 /* Compare two registers */
5486 gen_load_gpr(t0
, rs
);
5487 gen_load_gpr(t1
, rt
);
5490 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5493 /* Compare to zero */
5495 gen_load_gpr(t0
, rs
);
5498 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5501 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5503 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5507 /* Jump to register */
5508 if (offset
!= 0 && offset
!= 16) {
5509 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5510 others are reserved. */
5511 MIPS_INVAL("jump hint");
5512 generate_exception_end(ctx
, EXCP_RI
);
5515 gen_load_gpr(btarget
, rs
);
5518 MIPS_INVAL("branch/jump");
5519 generate_exception_end(ctx
, EXCP_RI
);
5522 if (bcond_compute
== 0) {
5523 /* No condition to be computed */
5525 case OPC_BEQ
: /* rx == rx */
5527 ctx
->hflags
|= MIPS_HFLAG_B
;
5529 case OPC_BGEZAL
: /* 0 >= 0 */
5530 /* Always take and link */
5531 tcg_gen_movi_tl(cpu_gpr
[31],
5532 ctx
->base
.pc_next
+ insn_bytes
);
5533 ctx
->hflags
|= MIPS_HFLAG_B
;
5535 case OPC_BNE
: /* rx != rx */
5536 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5537 /* Skip the instruction in the delay slot */
5538 ctx
->base
.pc_next
+= 4;
5541 ctx
->hflags
|= MIPS_HFLAG_BR
;
5545 tcg_gen_movi_tl(cpu_gpr
[rt
],
5546 ctx
->base
.pc_next
+ insn_bytes
);
5548 ctx
->hflags
|= MIPS_HFLAG_BR
;
5551 MIPS_INVAL("branch/jump");
5552 generate_exception_end(ctx
, EXCP_RI
);
5558 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5561 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5564 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5565 tcg_gen_movi_tl(cpu_gpr
[31],
5566 ctx
->base
.pc_next
+ insn_bytes
);
5569 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5571 ctx
->hflags
|= MIPS_HFLAG_BC
;
5574 MIPS_INVAL("conditional branch/jump");
5575 generate_exception_end(ctx
, EXCP_RI
);
5580 ctx
->btarget
= btgt
;
5583 if (insn_bytes
== 2) {
5584 ctx
->hflags
|= MIPS_HFLAG_B16
;
5591 /* special3 bitfield operations */
5592 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
5593 int rs
, int lsb
, int msb
)
5595 TCGv t0
= tcg_temp_new();
5596 TCGv t1
= tcg_temp_new();
5598 gen_load_gpr(t1
, rs
);
5601 if (lsb
+ msb
> 31) {
5605 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5607 /* The two checks together imply that lsb == 0,
5608 so this is a simple sign-extension. */
5609 tcg_gen_ext32s_tl(t0
, t1
);
5612 #if defined(TARGET_MIPS64)
5621 if (lsb
+ msb
> 63) {
5624 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5631 gen_load_gpr(t0
, rt
);
5632 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5633 tcg_gen_ext32s_tl(t0
, t0
);
5635 #if defined(TARGET_MIPS64)
5646 gen_load_gpr(t0
, rt
);
5647 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5652 MIPS_INVAL("bitops");
5653 generate_exception_end(ctx
, EXCP_RI
);
5658 gen_store_gpr(t0
, rt
);
5663 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
5668 /* If no destination, treat it as a NOP. */
5672 t0
= tcg_temp_new();
5673 gen_load_gpr(t0
, rt
);
5677 TCGv t1
= tcg_temp_new();
5678 TCGv t2
= tcg_const_tl(0x00FF00FF);
5680 tcg_gen_shri_tl(t1
, t0
, 8);
5681 tcg_gen_and_tl(t1
, t1
, t2
);
5682 tcg_gen_and_tl(t0
, t0
, t2
);
5683 tcg_gen_shli_tl(t0
, t0
, 8);
5684 tcg_gen_or_tl(t0
, t0
, t1
);
5687 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
5691 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
5694 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
5696 #if defined(TARGET_MIPS64)
5699 TCGv t1
= tcg_temp_new();
5700 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
5702 tcg_gen_shri_tl(t1
, t0
, 8);
5703 tcg_gen_and_tl(t1
, t1
, t2
);
5704 tcg_gen_and_tl(t0
, t0
, t2
);
5705 tcg_gen_shli_tl(t0
, t0
, 8);
5706 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
5713 TCGv t1
= tcg_temp_new();
5714 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
5716 tcg_gen_shri_tl(t1
, t0
, 16);
5717 tcg_gen_and_tl(t1
, t1
, t2
);
5718 tcg_gen_and_tl(t0
, t0
, t2
);
5719 tcg_gen_shli_tl(t0
, t0
, 16);
5720 tcg_gen_or_tl(t0
, t0
, t1
);
5721 tcg_gen_shri_tl(t1
, t0
, 32);
5722 tcg_gen_shli_tl(t0
, t0
, 32);
5723 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
5730 MIPS_INVAL("bsfhl");
5731 generate_exception_end(ctx
, EXCP_RI
);
5738 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
5747 t0
= tcg_temp_new();
5748 t1
= tcg_temp_new();
5749 gen_load_gpr(t0
, rs
);
5750 gen_load_gpr(t1
, rt
);
5751 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
5752 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
5753 if (opc
== OPC_LSA
) {
5754 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5763 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
5771 t0
= tcg_temp_new();
5772 if (bits
== 0 || bits
== wordsz
) {
5774 gen_load_gpr(t0
, rt
);
5776 gen_load_gpr(t0
, rs
);
5780 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
5782 #if defined(TARGET_MIPS64)
5784 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5789 TCGv t1
= tcg_temp_new();
5790 gen_load_gpr(t0
, rt
);
5791 gen_load_gpr(t1
, rs
);
5795 TCGv_i64 t2
= tcg_temp_new_i64();
5796 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
5797 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
5798 gen_move_low32(cpu_gpr
[rd
], t2
);
5799 tcg_temp_free_i64(t2
);
5802 #if defined(TARGET_MIPS64)
5804 tcg_gen_shli_tl(t0
, t0
, bits
);
5805 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
5806 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
5816 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
5819 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
5822 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
5825 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
5828 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
5835 t0
= tcg_temp_new();
5836 gen_load_gpr(t0
, rt
);
5839 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
5841 #if defined(TARGET_MIPS64)
5843 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
5850 #ifndef CONFIG_USER_ONLY
5851 /* CP0 (MMU and control) */
5852 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
5854 TCGv_i64 t0
= tcg_temp_new_i64();
5855 TCGv_i64 t1
= tcg_temp_new_i64();
5857 tcg_gen_ext_tl_i64(t0
, arg
);
5858 tcg_gen_ld_i64(t1
, cpu_env
, off
);
5859 #if defined(TARGET_MIPS64)
5860 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
5862 tcg_gen_concat32_i64(t1
, t1
, t0
);
5864 tcg_gen_st_i64(t1
, cpu_env
, off
);
5865 tcg_temp_free_i64(t1
);
5866 tcg_temp_free_i64(t0
);
5869 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
5871 TCGv_i64 t0
= tcg_temp_new_i64();
5872 TCGv_i64 t1
= tcg_temp_new_i64();
5874 tcg_gen_ext_tl_i64(t0
, arg
);
5875 tcg_gen_ld_i64(t1
, cpu_env
, off
);
5876 tcg_gen_concat32_i64(t1
, t1
, t0
);
5877 tcg_gen_st_i64(t1
, cpu_env
, off
);
5878 tcg_temp_free_i64(t1
);
5879 tcg_temp_free_i64(t0
);
5882 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
5884 TCGv_i64 t0
= tcg_temp_new_i64();
5886 tcg_gen_ld_i64(t0
, cpu_env
, off
);
5887 #if defined(TARGET_MIPS64)
5888 tcg_gen_shri_i64(t0
, t0
, 30);
5890 tcg_gen_shri_i64(t0
, t0
, 32);
5892 gen_move_low32(arg
, t0
);
5893 tcg_temp_free_i64(t0
);
5896 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
5898 TCGv_i64 t0
= tcg_temp_new_i64();
5900 tcg_gen_ld_i64(t0
, cpu_env
, off
);
5901 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
5902 gen_move_low32(arg
, t0
);
5903 tcg_temp_free_i64(t0
);
5906 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
5908 TCGv_i32 t0
= tcg_temp_new_i32();
5910 tcg_gen_ld_i32(t0
, cpu_env
, off
);
5911 tcg_gen_ext_i32_tl(arg
, t0
);
5912 tcg_temp_free_i32(t0
);
5915 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
5917 tcg_gen_ld_tl(arg
, cpu_env
, off
);
5918 tcg_gen_ext32s_tl(arg
, arg
);
5921 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
5923 TCGv_i32 t0
= tcg_temp_new_i32();
5925 tcg_gen_trunc_tl_i32(t0
, arg
);
5926 tcg_gen_st_i32(t0
, cpu_env
, off
);
5927 tcg_temp_free_i32(t0
);
5930 #define CP0_CHECK(c) \
5933 goto cp0_unimplemented; \
5937 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5939 const char *rn
= "invalid";
5945 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5946 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
5950 goto cp0_unimplemented
;
5956 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5957 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
5961 goto cp0_unimplemented
;
5967 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
5968 ctx
->CP0_LLAddr_shift
);
5972 CP0_CHECK(ctx
->mrp
);
5973 gen_helper_mfhc0_maar(arg
, cpu_env
);
5977 goto cp0_unimplemented
;
5986 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
5990 goto cp0_unimplemented
;
5994 goto cp0_unimplemented
;
5996 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6000 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6001 tcg_gen_movi_tl(arg
, 0);
6004 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6006 const char *rn
= "invalid";
6007 uint64_t mask
= ctx
->PAMask
>> 36;
6013 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6014 tcg_gen_andi_tl(arg
, arg
, mask
);
6015 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6019 goto cp0_unimplemented
;
6025 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6026 tcg_gen_andi_tl(arg
, arg
, mask
);
6027 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6031 goto cp0_unimplemented
;
6037 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6038 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6039 relevant for modern MIPS cores supporting MTHC0, therefore
6040 treating MTHC0 to LLAddr as NOP. */
6044 CP0_CHECK(ctx
->mrp
);
6045 gen_helper_mthc0_maar(cpu_env
, arg
);
6049 goto cp0_unimplemented
;
6058 tcg_gen_andi_tl(arg
, arg
, mask
);
6059 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6063 goto cp0_unimplemented
;
6067 goto cp0_unimplemented
;
6069 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6072 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6075 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6077 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6078 tcg_gen_movi_tl(arg
, 0);
6080 tcg_gen_movi_tl(arg
, ~0);
6084 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6086 const char *rn
= "invalid";
6089 check_insn(ctx
, ISA_MIPS32
);
6095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6100 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6105 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6110 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6115 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6119 goto cp0_unimplemented
;
6125 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6126 gen_helper_mfc0_random(arg
, cpu_env
);
6130 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6131 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6135 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6136 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6140 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6141 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6145 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6146 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6150 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6151 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6155 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6156 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6157 rn
= "VPEScheFBack";
6160 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6161 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6165 goto cp0_unimplemented
;
6172 TCGv_i64 tmp
= tcg_temp_new_i64();
6173 tcg_gen_ld_i64(tmp
, cpu_env
,
6174 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6175 #if defined(TARGET_MIPS64)
6177 /* Move RI/XI fields to bits 31:30 */
6178 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6179 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6182 gen_move_low32(arg
, tmp
);
6183 tcg_temp_free_i64(tmp
);
6188 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6189 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6193 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6194 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6198 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6199 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6203 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6204 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6208 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6209 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6213 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6214 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6218 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6219 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6223 goto cp0_unimplemented
;
6230 TCGv_i64 tmp
= tcg_temp_new_i64();
6231 tcg_gen_ld_i64(tmp
, cpu_env
,
6232 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6233 #if defined(TARGET_MIPS64)
6235 /* Move RI/XI fields to bits 31:30 */
6236 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6237 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6240 gen_move_low32(arg
, tmp
);
6241 tcg_temp_free_i64(tmp
);
6247 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6248 rn
= "GlobalNumber";
6251 goto cp0_unimplemented
;
6257 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6258 tcg_gen_ext32s_tl(arg
, arg
);
6262 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6263 rn
= "ContextConfig";
6264 goto cp0_unimplemented
;
6266 CP0_CHECK(ctx
->ulri
);
6267 tcg_gen_ld_tl(arg
, cpu_env
,
6268 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6269 tcg_gen_ext32s_tl(arg
, arg
);
6273 goto cp0_unimplemented
;
6279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6283 check_insn(ctx
, ISA_MIPS32R2
);
6284 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6289 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6290 tcg_gen_ext32s_tl(arg
, arg
);
6295 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6296 tcg_gen_ext32s_tl(arg
, arg
);
6301 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6302 tcg_gen_ext32s_tl(arg
, arg
);
6307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6317 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6321 goto cp0_unimplemented
;
6327 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6331 check_insn(ctx
, ISA_MIPS32R2
);
6332 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6336 check_insn(ctx
, ISA_MIPS32R2
);
6337 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6341 check_insn(ctx
, ISA_MIPS32R2
);
6342 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6346 check_insn(ctx
, ISA_MIPS32R2
);
6347 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6351 check_insn(ctx
, ISA_MIPS32R2
);
6352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6357 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6361 goto cp0_unimplemented
;
6367 check_insn(ctx
, ISA_MIPS32R2
);
6368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6372 goto cp0_unimplemented
;
6378 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6379 tcg_gen_ext32s_tl(arg
, arg
);
6384 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
6395 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
6399 goto cp0_unimplemented
;
6405 /* Mark as an IO operation because we read the time. */
6406 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6409 gen_helper_mfc0_count(arg
, cpu_env
);
6410 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6413 /* Break the TB to be able to take timer interrupts immediately
6414 after reading count. DISAS_STOP isn't sufficient, we need to
6415 ensure we break completely out of translated code. */
6416 gen_save_pc(ctx
->base
.pc_next
+ 4);
6417 ctx
->base
.is_jmp
= DISAS_EXIT
;
6420 /* 6,7 are implementation dependent */
6422 goto cp0_unimplemented
;
6428 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6429 tcg_gen_ext32s_tl(arg
, arg
);
6433 goto cp0_unimplemented
;
6439 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6442 /* 6,7 are implementation dependent */
6444 goto cp0_unimplemented
;
6450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6454 check_insn(ctx
, ISA_MIPS32R2
);
6455 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6459 check_insn(ctx
, ISA_MIPS32R2
);
6460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6464 check_insn(ctx
, ISA_MIPS32R2
);
6465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6469 goto cp0_unimplemented
;
6475 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6479 goto cp0_unimplemented
;
6485 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6486 tcg_gen_ext32s_tl(arg
, arg
);
6490 goto cp0_unimplemented
;
6496 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6500 check_insn(ctx
, ISA_MIPS32R2
);
6501 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6502 tcg_gen_ext32s_tl(arg
, arg
);
6506 check_insn(ctx
, ISA_MIPS32R2
);
6507 CP0_CHECK(ctx
->cmgcr
);
6508 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6509 tcg_gen_ext32s_tl(arg
, arg
);
6513 goto cp0_unimplemented
;
6519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6531 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6542 /* 6,7 are implementation dependent */
6544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6552 goto cp0_unimplemented
;
6558 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6562 CP0_CHECK(ctx
->mrp
);
6563 gen_helper_mfc0_maar(arg
, cpu_env
);
6567 CP0_CHECK(ctx
->mrp
);
6568 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6572 goto cp0_unimplemented
;
6585 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6586 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6590 goto cp0_unimplemented
;
6603 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6604 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6608 goto cp0_unimplemented
;
6614 #if defined(TARGET_MIPS64)
6615 check_insn(ctx
, ISA_MIPS3
);
6616 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6617 tcg_gen_ext32s_tl(arg
, arg
);
6622 goto cp0_unimplemented
;
6626 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6627 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6630 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6634 goto cp0_unimplemented
;
6638 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6639 rn
= "'Diagnostic"; /* implementation dependent */
6644 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6648 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
6649 rn
= "TraceControl";
6650 goto cp0_unimplemented
;
6652 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
6653 rn
= "TraceControl2";
6654 goto cp0_unimplemented
;
6656 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
6657 rn
= "UserTraceData";
6658 goto cp0_unimplemented
;
6660 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
6662 goto cp0_unimplemented
;
6664 goto cp0_unimplemented
;
6671 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6672 tcg_gen_ext32s_tl(arg
, arg
);
6676 goto cp0_unimplemented
;
6682 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6683 rn
= "Performance0";
6686 // gen_helper_mfc0_performance1(arg);
6687 rn
= "Performance1";
6688 goto cp0_unimplemented
;
6690 // gen_helper_mfc0_performance2(arg);
6691 rn
= "Performance2";
6692 goto cp0_unimplemented
;
6694 // gen_helper_mfc0_performance3(arg);
6695 rn
= "Performance3";
6696 goto cp0_unimplemented
;
6698 // gen_helper_mfc0_performance4(arg);
6699 rn
= "Performance4";
6700 goto cp0_unimplemented
;
6702 // gen_helper_mfc0_performance5(arg);
6703 rn
= "Performance5";
6704 goto cp0_unimplemented
;
6706 // gen_helper_mfc0_performance6(arg);
6707 rn
= "Performance6";
6708 goto cp0_unimplemented
;
6710 // gen_helper_mfc0_performance7(arg);
6711 rn
= "Performance7";
6712 goto cp0_unimplemented
;
6714 goto cp0_unimplemented
;
6720 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6724 goto cp0_unimplemented
;
6733 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6737 goto cp0_unimplemented
;
6747 TCGv_i64 tmp
= tcg_temp_new_i64();
6748 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
6749 gen_move_low32(arg
, tmp
);
6750 tcg_temp_free_i64(tmp
);
6758 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6762 goto cp0_unimplemented
;
6771 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6782 goto cp0_unimplemented
;
6788 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6789 tcg_gen_ext32s_tl(arg
, arg
);
6793 goto cp0_unimplemented
;
6800 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6809 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6810 tcg_gen_ld_tl(arg
, cpu_env
,
6811 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6812 tcg_gen_ext32s_tl(arg
, arg
);
6816 goto cp0_unimplemented
;
6820 goto cp0_unimplemented
;
6822 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
6826 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6827 gen_mfc0_unimplemented(ctx
, arg
);
6830 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6832 const char *rn
= "invalid";
6835 check_insn(ctx
, ISA_MIPS32
);
6837 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6845 gen_helper_mtc0_index(cpu_env
, arg
);
6849 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6850 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6854 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 goto cp0_unimplemented
;
6879 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6880 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6884 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6885 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6889 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6890 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6894 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6895 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6899 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6900 tcg_gen_st_tl(arg
, cpu_env
,
6901 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6905 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6906 tcg_gen_st_tl(arg
, cpu_env
,
6907 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6908 rn
= "VPEScheFBack";
6911 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6912 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6916 goto cp0_unimplemented
;
6922 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6927 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6931 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6932 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6936 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6937 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6941 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6942 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6947 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6952 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6957 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6961 goto cp0_unimplemented
;
6967 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6973 rn
= "GlobalNumber";
6976 goto cp0_unimplemented
;
6982 gen_helper_mtc0_context(cpu_env
, arg
);
6986 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6987 rn
= "ContextConfig";
6988 goto cp0_unimplemented
;
6990 CP0_CHECK(ctx
->ulri
);
6991 tcg_gen_st_tl(arg
, cpu_env
,
6992 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6996 goto cp0_unimplemented
;
7002 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7006 check_insn(ctx
, ISA_MIPS32R2
);
7007 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7009 ctx
->base
.is_jmp
= DISAS_STOP
;
7013 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7018 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7023 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7028 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7033 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7038 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7042 goto cp0_unimplemented
;
7048 gen_helper_mtc0_wired(cpu_env
, arg
);
7052 check_insn(ctx
, ISA_MIPS32R2
);
7053 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7057 check_insn(ctx
, ISA_MIPS32R2
);
7058 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7062 check_insn(ctx
, ISA_MIPS32R2
);
7063 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7067 check_insn(ctx
, ISA_MIPS32R2
);
7068 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7072 check_insn(ctx
, ISA_MIPS32R2
);
7073 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7078 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7082 goto cp0_unimplemented
;
7088 check_insn(ctx
, ISA_MIPS32R2
);
7089 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7090 ctx
->base
.is_jmp
= DISAS_STOP
;
7094 goto cp0_unimplemented
;
7116 goto cp0_unimplemented
;
7122 gen_helper_mtc0_count(cpu_env
, arg
);
7125 /* 6,7 are implementation dependent */
7127 goto cp0_unimplemented
;
7133 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7137 goto cp0_unimplemented
;
7143 gen_helper_mtc0_compare(cpu_env
, arg
);
7146 /* 6,7 are implementation dependent */
7148 goto cp0_unimplemented
;
7154 save_cpu_state(ctx
, 1);
7155 gen_helper_mtc0_status(cpu_env
, arg
);
7156 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7157 gen_save_pc(ctx
->base
.pc_next
+ 4);
7158 ctx
->base
.is_jmp
= DISAS_EXIT
;
7162 check_insn(ctx
, ISA_MIPS32R2
);
7163 gen_helper_mtc0_intctl(cpu_env
, arg
);
7164 /* Stop translation as we may have switched the execution mode */
7165 ctx
->base
.is_jmp
= DISAS_STOP
;
7169 check_insn(ctx
, ISA_MIPS32R2
);
7170 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7171 /* Stop translation as we may have switched the execution mode */
7172 ctx
->base
.is_jmp
= DISAS_STOP
;
7176 check_insn(ctx
, ISA_MIPS32R2
);
7177 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7178 /* Stop translation as we may have switched the execution mode */
7179 ctx
->base
.is_jmp
= DISAS_STOP
;
7183 goto cp0_unimplemented
;
7189 save_cpu_state(ctx
, 1);
7190 gen_helper_mtc0_cause(cpu_env
, arg
);
7191 /* Stop translation as we may have triggered an interrupt.
7192 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7193 * translated code to check for pending interrupts. */
7194 gen_save_pc(ctx
->base
.pc_next
+ 4);
7195 ctx
->base
.is_jmp
= DISAS_EXIT
;
7199 goto cp0_unimplemented
;
7205 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7209 goto cp0_unimplemented
;
7219 check_insn(ctx
, ISA_MIPS32R2
);
7220 gen_helper_mtc0_ebase(cpu_env
, arg
);
7224 goto cp0_unimplemented
;
7230 gen_helper_mtc0_config0(cpu_env
, arg
);
7232 /* Stop translation as we may have switched the execution mode */
7233 ctx
->base
.is_jmp
= DISAS_STOP
;
7236 /* ignored, read only */
7240 gen_helper_mtc0_config2(cpu_env
, arg
);
7242 /* Stop translation as we may have switched the execution mode */
7243 ctx
->base
.is_jmp
= DISAS_STOP
;
7246 gen_helper_mtc0_config3(cpu_env
, arg
);
7248 /* Stop translation as we may have switched the execution mode */
7249 ctx
->base
.is_jmp
= DISAS_STOP
;
7252 gen_helper_mtc0_config4(cpu_env
, arg
);
7254 ctx
->base
.is_jmp
= DISAS_STOP
;
7257 gen_helper_mtc0_config5(cpu_env
, arg
);
7259 /* Stop translation as we may have switched the execution mode */
7260 ctx
->base
.is_jmp
= DISAS_STOP
;
7262 /* 6,7 are implementation dependent */
7272 rn
= "Invalid config selector";
7273 goto cp0_unimplemented
;
7279 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7283 CP0_CHECK(ctx
->mrp
);
7284 gen_helper_mtc0_maar(cpu_env
, arg
);
7288 CP0_CHECK(ctx
->mrp
);
7289 gen_helper_mtc0_maari(cpu_env
, arg
);
7293 goto cp0_unimplemented
;
7306 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7307 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7311 goto cp0_unimplemented
;
7324 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7325 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7329 goto cp0_unimplemented
;
7335 #if defined(TARGET_MIPS64)
7336 check_insn(ctx
, ISA_MIPS3
);
7337 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7342 goto cp0_unimplemented
;
7346 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7347 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7350 gen_helper_mtc0_framemask(cpu_env
, arg
);
7354 goto cp0_unimplemented
;
7359 rn
= "Diagnostic"; /* implementation dependent */
7364 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7365 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7366 gen_save_pc(ctx
->base
.pc_next
+ 4);
7367 ctx
->base
.is_jmp
= DISAS_EXIT
;
7371 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7372 rn
= "TraceControl";
7373 /* Stop translation as we may have switched the execution mode */
7374 ctx
->base
.is_jmp
= DISAS_STOP
;
7375 goto cp0_unimplemented
;
7377 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7378 rn
= "TraceControl2";
7379 /* Stop translation as we may have switched the execution mode */
7380 ctx
->base
.is_jmp
= DISAS_STOP
;
7381 goto cp0_unimplemented
;
7383 /* Stop translation as we may have switched the execution mode */
7384 ctx
->base
.is_jmp
= DISAS_STOP
;
7385 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7386 rn
= "UserTraceData";
7387 /* Stop translation as we may have switched the execution mode */
7388 ctx
->base
.is_jmp
= DISAS_STOP
;
7389 goto cp0_unimplemented
;
7391 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7392 /* Stop translation as we may have switched the execution mode */
7393 ctx
->base
.is_jmp
= DISAS_STOP
;
7395 goto cp0_unimplemented
;
7397 goto cp0_unimplemented
;
7404 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7408 goto cp0_unimplemented
;
7414 gen_helper_mtc0_performance0(cpu_env
, arg
);
7415 rn
= "Performance0";
7418 // gen_helper_mtc0_performance1(arg);
7419 rn
= "Performance1";
7420 goto cp0_unimplemented
;
7422 // gen_helper_mtc0_performance2(arg);
7423 rn
= "Performance2";
7424 goto cp0_unimplemented
;
7426 // gen_helper_mtc0_performance3(arg);
7427 rn
= "Performance3";
7428 goto cp0_unimplemented
;
7430 // gen_helper_mtc0_performance4(arg);
7431 rn
= "Performance4";
7432 goto cp0_unimplemented
;
7434 // gen_helper_mtc0_performance5(arg);
7435 rn
= "Performance5";
7436 goto cp0_unimplemented
;
7438 // gen_helper_mtc0_performance6(arg);
7439 rn
= "Performance6";
7440 goto cp0_unimplemented
;
7442 // gen_helper_mtc0_performance7(arg);
7443 rn
= "Performance7";
7444 goto cp0_unimplemented
;
7446 goto cp0_unimplemented
;
7452 gen_helper_mtc0_errctl(cpu_env
, arg
);
7453 ctx
->base
.is_jmp
= DISAS_STOP
;
7457 goto cp0_unimplemented
;
7470 goto cp0_unimplemented
;
7479 gen_helper_mtc0_taglo(cpu_env
, arg
);
7486 gen_helper_mtc0_datalo(cpu_env
, arg
);
7490 goto cp0_unimplemented
;
7499 gen_helper_mtc0_taghi(cpu_env
, arg
);
7506 gen_helper_mtc0_datahi(cpu_env
, arg
);
7511 goto cp0_unimplemented
;
7517 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7521 goto cp0_unimplemented
;
7528 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7537 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7538 tcg_gen_st_tl(arg
, cpu_env
,
7539 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7543 goto cp0_unimplemented
;
7547 goto cp0_unimplemented
;
7549 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
7551 /* For simplicity assume that all writes can cause interrupts. */
7552 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7554 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7555 * translated code to check for pending interrupts. */
7556 gen_save_pc(ctx
->base
.pc_next
+ 4);
7557 ctx
->base
.is_jmp
= DISAS_EXIT
;
7562 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7565 #if defined(TARGET_MIPS64)
7566 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7568 const char *rn
= "invalid";
7571 check_insn(ctx
, ISA_MIPS64
);
7577 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7581 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7582 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7586 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7587 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7592 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7597 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7601 goto cp0_unimplemented
;
7607 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7608 gen_helper_mfc0_random(arg
, cpu_env
);
7612 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7617 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7618 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7622 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7623 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7627 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7628 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
7632 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7633 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7638 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7639 rn
= "VPEScheFBack";
7642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7643 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7647 goto cp0_unimplemented
;
7653 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
7657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7658 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7663 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7668 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7672 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7673 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7677 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7678 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
7682 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7683 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
7687 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7688 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
7692 goto cp0_unimplemented
;
7698 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
7703 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
7704 rn
= "GlobalNumber";
7707 goto cp0_unimplemented
;
7713 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
7717 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
7718 rn
= "ContextConfig";
7719 goto cp0_unimplemented
;
7721 CP0_CHECK(ctx
->ulri
);
7722 tcg_gen_ld_tl(arg
, cpu_env
,
7723 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7727 goto cp0_unimplemented
;
7733 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7737 check_insn(ctx
, ISA_MIPS32R2
);
7738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7743 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7748 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7753 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7758 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
7763 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
7768 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
7772 goto cp0_unimplemented
;
7778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7782 check_insn(ctx
, ISA_MIPS32R2
);
7783 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7787 check_insn(ctx
, ISA_MIPS32R2
);
7788 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7792 check_insn(ctx
, ISA_MIPS32R2
);
7793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7797 check_insn(ctx
, ISA_MIPS32R2
);
7798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7802 check_insn(ctx
, ISA_MIPS32R2
);
7803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7808 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7812 goto cp0_unimplemented
;
7818 check_insn(ctx
, ISA_MIPS32R2
);
7819 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7823 goto cp0_unimplemented
;
7829 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7834 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7839 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7844 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7845 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7849 goto cp0_unimplemented
;
7855 /* Mark as an IO operation because we read the time. */
7856 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7859 gen_helper_mfc0_count(arg
, cpu_env
);
7860 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7863 /* Break the TB to be able to take timer interrupts immediately
7864 after reading count. DISAS_STOP isn't sufficient, we need to
7865 ensure we break completely out of translated code. */
7866 gen_save_pc(ctx
->base
.pc_next
+ 4);
7867 ctx
->base
.is_jmp
= DISAS_EXIT
;
7870 /* 6,7 are implementation dependent */
7872 goto cp0_unimplemented
;
7878 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7882 goto cp0_unimplemented
;
7888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7891 /* 6,7 are implementation dependent */
7893 goto cp0_unimplemented
;
7899 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7903 check_insn(ctx
, ISA_MIPS32R2
);
7904 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7908 check_insn(ctx
, ISA_MIPS32R2
);
7909 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7913 check_insn(ctx
, ISA_MIPS32R2
);
7914 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7918 goto cp0_unimplemented
;
7924 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7928 goto cp0_unimplemented
;
7934 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7938 goto cp0_unimplemented
;
7944 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7948 check_insn(ctx
, ISA_MIPS32R2
);
7949 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7953 check_insn(ctx
, ISA_MIPS32R2
);
7954 CP0_CHECK(ctx
->cmgcr
);
7955 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7959 goto cp0_unimplemented
;
7965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7977 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7981 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7988 /* 6,7 are implementation dependent */
7990 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7994 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7998 goto cp0_unimplemented
;
8004 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8008 CP0_CHECK(ctx
->mrp
);
8009 gen_helper_dmfc0_maar(arg
, cpu_env
);
8013 CP0_CHECK(ctx
->mrp
);
8014 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8018 goto cp0_unimplemented
;
8031 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8032 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8036 goto cp0_unimplemented
;
8049 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8050 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8054 goto cp0_unimplemented
;
8060 check_insn(ctx
, ISA_MIPS3
);
8061 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8065 goto cp0_unimplemented
;
8069 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8070 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8073 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8077 goto cp0_unimplemented
;
8081 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8082 rn
= "'Diagnostic"; /* implementation dependent */
8087 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8091 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8092 rn
= "TraceControl";
8093 goto cp0_unimplemented
;
8095 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8096 rn
= "TraceControl2";
8097 goto cp0_unimplemented
;
8099 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8100 rn
= "UserTraceData";
8101 goto cp0_unimplemented
;
8103 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8105 goto cp0_unimplemented
;
8107 goto cp0_unimplemented
;
8114 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8118 goto cp0_unimplemented
;
8124 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8125 rn
= "Performance0";
8128 // gen_helper_dmfc0_performance1(arg);
8129 rn
= "Performance1";
8130 goto cp0_unimplemented
;
8132 // gen_helper_dmfc0_performance2(arg);
8133 rn
= "Performance2";
8134 goto cp0_unimplemented
;
8136 // gen_helper_dmfc0_performance3(arg);
8137 rn
= "Performance3";
8138 goto cp0_unimplemented
;
8140 // gen_helper_dmfc0_performance4(arg);
8141 rn
= "Performance4";
8142 goto cp0_unimplemented
;
8144 // gen_helper_dmfc0_performance5(arg);
8145 rn
= "Performance5";
8146 goto cp0_unimplemented
;
8148 // gen_helper_dmfc0_performance6(arg);
8149 rn
= "Performance6";
8150 goto cp0_unimplemented
;
8152 // gen_helper_dmfc0_performance7(arg);
8153 rn
= "Performance7";
8154 goto cp0_unimplemented
;
8156 goto cp0_unimplemented
;
8162 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8166 goto cp0_unimplemented
;
8176 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8180 goto cp0_unimplemented
;
8189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8200 goto cp0_unimplemented
;
8209 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8216 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8220 goto cp0_unimplemented
;
8226 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8230 goto cp0_unimplemented
;
8237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8246 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8247 tcg_gen_ld_tl(arg
, cpu_env
,
8248 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8252 goto cp0_unimplemented
;
8256 goto cp0_unimplemented
;
8258 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8262 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8263 gen_mfc0_unimplemented(ctx
, arg
);
8266 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8268 const char *rn
= "invalid";
8271 check_insn(ctx
, ISA_MIPS64
);
8273 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8281 gen_helper_mtc0_index(cpu_env
, arg
);
8285 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8286 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8290 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8295 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8305 goto cp0_unimplemented
;
8315 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8316 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8320 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8321 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8325 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8326 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8330 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8331 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8335 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8336 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8340 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8341 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8342 rn
= "VPEScheFBack";
8345 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8346 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8350 goto cp0_unimplemented
;
8356 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8360 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8361 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8365 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8366 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8370 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8371 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8375 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8376 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8380 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8381 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8385 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8386 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8391 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8395 goto cp0_unimplemented
;
8401 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8407 rn
= "GlobalNumber";
8410 goto cp0_unimplemented
;
8416 gen_helper_mtc0_context(cpu_env
, arg
);
8420 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
8421 rn
= "ContextConfig";
8422 goto cp0_unimplemented
;
8424 CP0_CHECK(ctx
->ulri
);
8425 tcg_gen_st_tl(arg
, cpu_env
,
8426 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8430 goto cp0_unimplemented
;
8436 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8440 check_insn(ctx
, ISA_MIPS32R2
);
8441 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8446 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8451 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8456 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8461 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8466 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8471 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8475 goto cp0_unimplemented
;
8481 gen_helper_mtc0_wired(cpu_env
, arg
);
8485 check_insn(ctx
, ISA_MIPS32R2
);
8486 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8490 check_insn(ctx
, ISA_MIPS32R2
);
8491 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8495 check_insn(ctx
, ISA_MIPS32R2
);
8496 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8500 check_insn(ctx
, ISA_MIPS32R2
);
8501 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8505 check_insn(ctx
, ISA_MIPS32R2
);
8506 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8511 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8515 goto cp0_unimplemented
;
8521 check_insn(ctx
, ISA_MIPS32R2
);
8522 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8523 ctx
->base
.is_jmp
= DISAS_STOP
;
8527 goto cp0_unimplemented
;
8549 goto cp0_unimplemented
;
8555 gen_helper_mtc0_count(cpu_env
, arg
);
8558 /* 6,7 are implementation dependent */
8560 goto cp0_unimplemented
;
8562 /* Stop translation as we may have switched the execution mode */
8563 ctx
->base
.is_jmp
= DISAS_STOP
;
8568 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8572 goto cp0_unimplemented
;
8578 gen_helper_mtc0_compare(cpu_env
, arg
);
8581 /* 6,7 are implementation dependent */
8583 goto cp0_unimplemented
;
8585 /* Stop translation as we may have switched the execution mode */
8586 ctx
->base
.is_jmp
= DISAS_STOP
;
8591 save_cpu_state(ctx
, 1);
8592 gen_helper_mtc0_status(cpu_env
, arg
);
8593 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8594 gen_save_pc(ctx
->base
.pc_next
+ 4);
8595 ctx
->base
.is_jmp
= DISAS_EXIT
;
8599 check_insn(ctx
, ISA_MIPS32R2
);
8600 gen_helper_mtc0_intctl(cpu_env
, arg
);
8601 /* Stop translation as we may have switched the execution mode */
8602 ctx
->base
.is_jmp
= DISAS_STOP
;
8606 check_insn(ctx
, ISA_MIPS32R2
);
8607 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8608 /* Stop translation as we may have switched the execution mode */
8609 ctx
->base
.is_jmp
= DISAS_STOP
;
8613 check_insn(ctx
, ISA_MIPS32R2
);
8614 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8615 /* Stop translation as we may have switched the execution mode */
8616 ctx
->base
.is_jmp
= DISAS_STOP
;
8620 goto cp0_unimplemented
;
8626 save_cpu_state(ctx
, 1);
8627 gen_helper_mtc0_cause(cpu_env
, arg
);
8628 /* Stop translation as we may have triggered an interrupt.
8629 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8630 * translated code to check for pending interrupts. */
8631 gen_save_pc(ctx
->base
.pc_next
+ 4);
8632 ctx
->base
.is_jmp
= DISAS_EXIT
;
8636 goto cp0_unimplemented
;
8642 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8646 goto cp0_unimplemented
;
8656 check_insn(ctx
, ISA_MIPS32R2
);
8657 gen_helper_mtc0_ebase(cpu_env
, arg
);
8661 goto cp0_unimplemented
;
8667 gen_helper_mtc0_config0(cpu_env
, arg
);
8669 /* Stop translation as we may have switched the execution mode */
8670 ctx
->base
.is_jmp
= DISAS_STOP
;
8673 /* ignored, read only */
8677 gen_helper_mtc0_config2(cpu_env
, arg
);
8679 /* Stop translation as we may have switched the execution mode */
8680 ctx
->base
.is_jmp
= DISAS_STOP
;
8683 gen_helper_mtc0_config3(cpu_env
, arg
);
8685 /* Stop translation as we may have switched the execution mode */
8686 ctx
->base
.is_jmp
= DISAS_STOP
;
8689 /* currently ignored */
8693 gen_helper_mtc0_config5(cpu_env
, arg
);
8695 /* Stop translation as we may have switched the execution mode */
8696 ctx
->base
.is_jmp
= DISAS_STOP
;
8698 /* 6,7 are implementation dependent */
8700 rn
= "Invalid config selector";
8701 goto cp0_unimplemented
;
8707 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8711 CP0_CHECK(ctx
->mrp
);
8712 gen_helper_mtc0_maar(cpu_env
, arg
);
8716 CP0_CHECK(ctx
->mrp
);
8717 gen_helper_mtc0_maari(cpu_env
, arg
);
8721 goto cp0_unimplemented
;
8734 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8735 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8739 goto cp0_unimplemented
;
8752 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8753 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8757 goto cp0_unimplemented
;
8763 check_insn(ctx
, ISA_MIPS3
);
8764 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8768 goto cp0_unimplemented
;
8772 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8773 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8776 gen_helper_mtc0_framemask(cpu_env
, arg
);
8780 goto cp0_unimplemented
;
8785 rn
= "Diagnostic"; /* implementation dependent */
8790 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8791 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8792 gen_save_pc(ctx
->base
.pc_next
+ 4);
8793 ctx
->base
.is_jmp
= DISAS_EXIT
;
8797 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8798 /* Stop translation as we may have switched the execution mode */
8799 ctx
->base
.is_jmp
= DISAS_STOP
;
8800 rn
= "TraceControl";
8801 goto cp0_unimplemented
;
8803 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8804 /* Stop translation as we may have switched the execution mode */
8805 ctx
->base
.is_jmp
= DISAS_STOP
;
8806 rn
= "TraceControl2";
8807 goto cp0_unimplemented
;
8809 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8810 /* Stop translation as we may have switched the execution mode */
8811 ctx
->base
.is_jmp
= DISAS_STOP
;
8812 rn
= "UserTraceData";
8813 goto cp0_unimplemented
;
8815 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8816 /* Stop translation as we may have switched the execution mode */
8817 ctx
->base
.is_jmp
= DISAS_STOP
;
8819 goto cp0_unimplemented
;
8821 goto cp0_unimplemented
;
8828 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8832 goto cp0_unimplemented
;
8838 gen_helper_mtc0_performance0(cpu_env
, arg
);
8839 rn
= "Performance0";
8842 // gen_helper_mtc0_performance1(cpu_env, arg);
8843 rn
= "Performance1";
8844 goto cp0_unimplemented
;
8846 // gen_helper_mtc0_performance2(cpu_env, arg);
8847 rn
= "Performance2";
8848 goto cp0_unimplemented
;
8850 // gen_helper_mtc0_performance3(cpu_env, arg);
8851 rn
= "Performance3";
8852 goto cp0_unimplemented
;
8854 // gen_helper_mtc0_performance4(cpu_env, arg);
8855 rn
= "Performance4";
8856 goto cp0_unimplemented
;
8858 // gen_helper_mtc0_performance5(cpu_env, arg);
8859 rn
= "Performance5";
8860 goto cp0_unimplemented
;
8862 // gen_helper_mtc0_performance6(cpu_env, arg);
8863 rn
= "Performance6";
8864 goto cp0_unimplemented
;
8866 // gen_helper_mtc0_performance7(cpu_env, arg);
8867 rn
= "Performance7";
8868 goto cp0_unimplemented
;
8870 goto cp0_unimplemented
;
8876 gen_helper_mtc0_errctl(cpu_env
, arg
);
8877 ctx
->base
.is_jmp
= DISAS_STOP
;
8881 goto cp0_unimplemented
;
8894 goto cp0_unimplemented
;
8903 gen_helper_mtc0_taglo(cpu_env
, arg
);
8910 gen_helper_mtc0_datalo(cpu_env
, arg
);
8914 goto cp0_unimplemented
;
8923 gen_helper_mtc0_taghi(cpu_env
, arg
);
8930 gen_helper_mtc0_datahi(cpu_env
, arg
);
8935 goto cp0_unimplemented
;
8941 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8945 goto cp0_unimplemented
;
8952 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8961 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8962 tcg_gen_st_tl(arg
, cpu_env
,
8963 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8967 goto cp0_unimplemented
;
8971 goto cp0_unimplemented
;
8973 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
8975 /* For simplicity assume that all writes can cause interrupts. */
8976 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8978 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8979 * translated code to check for pending interrupts. */
8980 gen_save_pc(ctx
->base
.pc_next
+ 4);
8981 ctx
->base
.is_jmp
= DISAS_EXIT
;
8986 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8988 #endif /* TARGET_MIPS64 */
8990 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
8991 int u
, int sel
, int h
)
8993 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8994 TCGv t0
= tcg_temp_local_new();
8996 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8997 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8998 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
8999 tcg_gen_movi_tl(t0
, -1);
9000 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9001 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9002 tcg_gen_movi_tl(t0
, -1);
9008 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9011 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9021 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9024 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9027 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9030 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9033 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9036 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9039 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9042 gen_mfc0(ctx
, t0
, rt
, sel
);
9049 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9052 gen_mfc0(ctx
, t0
, rt
, sel
);
9058 gen_helper_mftc0_status(t0
, cpu_env
);
9061 gen_mfc0(ctx
, t0
, rt
, sel
);
9067 gen_helper_mftc0_cause(t0
, cpu_env
);
9077 gen_helper_mftc0_epc(t0
, cpu_env
);
9087 gen_helper_mftc0_ebase(t0
, cpu_env
);
9104 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9114 gen_helper_mftc0_debug(t0
, cpu_env
);
9117 gen_mfc0(ctx
, t0
, rt
, sel
);
9122 gen_mfc0(ctx
, t0
, rt
, sel
);
9124 } else switch (sel
) {
9125 /* GPR registers. */
9127 gen_helper_1e0i(mftgpr
, t0
, rt
);
9129 /* Auxiliary CPU registers */
9133 gen_helper_1e0i(mftlo
, t0
, 0);
9136 gen_helper_1e0i(mfthi
, t0
, 0);
9139 gen_helper_1e0i(mftacx
, t0
, 0);
9142 gen_helper_1e0i(mftlo
, t0
, 1);
9145 gen_helper_1e0i(mfthi
, t0
, 1);
9148 gen_helper_1e0i(mftacx
, t0
, 1);
9151 gen_helper_1e0i(mftlo
, t0
, 2);
9154 gen_helper_1e0i(mfthi
, t0
, 2);
9157 gen_helper_1e0i(mftacx
, t0
, 2);
9160 gen_helper_1e0i(mftlo
, t0
, 3);
9163 gen_helper_1e0i(mfthi
, t0
, 3);
9166 gen_helper_1e0i(mftacx
, t0
, 3);
9169 gen_helper_mftdsp(t0
, cpu_env
);
9175 /* Floating point (COP1). */
9177 /* XXX: For now we support only a single FPU context. */
9179 TCGv_i32 fp0
= tcg_temp_new_i32();
9181 gen_load_fpr32(ctx
, fp0
, rt
);
9182 tcg_gen_ext_i32_tl(t0
, fp0
);
9183 tcg_temp_free_i32(fp0
);
9185 TCGv_i32 fp0
= tcg_temp_new_i32();
9187 gen_load_fpr32h(ctx
, fp0
, rt
);
9188 tcg_gen_ext_i32_tl(t0
, fp0
);
9189 tcg_temp_free_i32(fp0
);
9193 /* XXX: For now we support only a single FPU context. */
9194 gen_helper_1e0i(cfc1
, t0
, rt
);
9196 /* COP2: Not implemented. */
9203 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9204 gen_store_gpr(t0
, rd
);
9210 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9211 generate_exception_end(ctx
, EXCP_RI
);
9214 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9215 int u
, int sel
, int h
)
9217 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9218 TCGv t0
= tcg_temp_local_new();
9220 gen_load_gpr(t0
, rt
);
9221 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9222 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9223 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9225 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9226 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9233 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9236 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9246 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9249 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9252 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9255 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9258 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9261 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9264 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9267 gen_mtc0(ctx
, t0
, rd
, sel
);
9274 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9277 gen_mtc0(ctx
, t0
, rd
, sel
);
9283 gen_helper_mttc0_status(cpu_env
, t0
);
9286 gen_mtc0(ctx
, t0
, rd
, sel
);
9292 gen_helper_mttc0_cause(cpu_env
, t0
);
9302 gen_helper_mttc0_ebase(cpu_env
, t0
);
9312 gen_helper_mttc0_debug(cpu_env
, t0
);
9315 gen_mtc0(ctx
, t0
, rd
, sel
);
9320 gen_mtc0(ctx
, t0
, rd
, sel
);
9322 } else switch (sel
) {
9323 /* GPR registers. */
9325 gen_helper_0e1i(mttgpr
, t0
, rd
);
9327 /* Auxiliary CPU registers */
9331 gen_helper_0e1i(mttlo
, t0
, 0);
9334 gen_helper_0e1i(mtthi
, t0
, 0);
9337 gen_helper_0e1i(mttacx
, t0
, 0);
9340 gen_helper_0e1i(mttlo
, t0
, 1);
9343 gen_helper_0e1i(mtthi
, t0
, 1);
9346 gen_helper_0e1i(mttacx
, t0
, 1);
9349 gen_helper_0e1i(mttlo
, t0
, 2);
9352 gen_helper_0e1i(mtthi
, t0
, 2);
9355 gen_helper_0e1i(mttacx
, t0
, 2);
9358 gen_helper_0e1i(mttlo
, t0
, 3);
9361 gen_helper_0e1i(mtthi
, t0
, 3);
9364 gen_helper_0e1i(mttacx
, t0
, 3);
9367 gen_helper_mttdsp(cpu_env
, t0
);
9373 /* Floating point (COP1). */
9375 /* XXX: For now we support only a single FPU context. */
9377 TCGv_i32 fp0
= tcg_temp_new_i32();
9379 tcg_gen_trunc_tl_i32(fp0
, t0
);
9380 gen_store_fpr32(ctx
, fp0
, rd
);
9381 tcg_temp_free_i32(fp0
);
9383 TCGv_i32 fp0
= tcg_temp_new_i32();
9385 tcg_gen_trunc_tl_i32(fp0
, t0
);
9386 gen_store_fpr32h(ctx
, fp0
, rd
);
9387 tcg_temp_free_i32(fp0
);
9391 /* XXX: For now we support only a single FPU context. */
9393 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
9395 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
9396 tcg_temp_free_i32(fs_tmp
);
9398 /* Stop translation as we may have changed hflags */
9399 ctx
->base
.is_jmp
= DISAS_STOP
;
9401 /* COP2: Not implemented. */
9408 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9414 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9415 generate_exception_end(ctx
, EXCP_RI
);
9418 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
9420 const char *opn
= "ldst";
9422 check_cp0_enabled(ctx
);
9429 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9434 TCGv t0
= tcg_temp_new();
9436 gen_load_gpr(t0
, rt
);
9437 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9442 #if defined(TARGET_MIPS64)
9444 check_insn(ctx
, ISA_MIPS3
);
9449 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9453 check_insn(ctx
, ISA_MIPS3
);
9455 TCGv t0
= tcg_temp_new();
9457 gen_load_gpr(t0
, rt
);
9458 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9470 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9476 TCGv t0
= tcg_temp_new();
9477 gen_load_gpr(t0
, rt
);
9478 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9484 check_cp0_enabled(ctx
);
9489 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9490 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9494 check_cp0_enabled(ctx
);
9495 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9496 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9501 if (!env
->tlb
->helper_tlbwi
)
9503 gen_helper_tlbwi(cpu_env
);
9508 if (!env
->tlb
->helper_tlbinv
) {
9511 gen_helper_tlbinv(cpu_env
);
9512 } /* treat as nop if TLBINV not supported */
9517 if (!env
->tlb
->helper_tlbinvf
) {
9520 gen_helper_tlbinvf(cpu_env
);
9521 } /* treat as nop if TLBINV not supported */
9525 if (!env
->tlb
->helper_tlbwr
)
9527 gen_helper_tlbwr(cpu_env
);
9531 if (!env
->tlb
->helper_tlbp
)
9533 gen_helper_tlbp(cpu_env
);
9537 if (!env
->tlb
->helper_tlbr
)
9539 gen_helper_tlbr(cpu_env
);
9541 case OPC_ERET
: /* OPC_ERETNC */
9542 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9543 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9546 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9547 if (ctx
->opcode
& (1 << bit_shift
)) {
9550 check_insn(ctx
, ISA_MIPS32R5
);
9551 gen_helper_eretnc(cpu_env
);
9555 check_insn(ctx
, ISA_MIPS2
);
9556 gen_helper_eret(cpu_env
);
9558 ctx
->base
.is_jmp
= DISAS_EXIT
;
9563 check_insn(ctx
, ISA_MIPS32
);
9564 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9565 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9568 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9570 generate_exception_end(ctx
, EXCP_RI
);
9572 gen_helper_deret(cpu_env
);
9573 ctx
->base
.is_jmp
= DISAS_EXIT
;
9578 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
9579 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
9580 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9583 /* If we get an exception, we want to restart at next instruction */
9584 ctx
->base
.pc_next
+= 4;
9585 save_cpu_state(ctx
, 1);
9586 ctx
->base
.pc_next
-= 4;
9587 gen_helper_wait(cpu_env
);
9588 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9593 generate_exception_end(ctx
, EXCP_RI
);
9596 (void)opn
; /* avoid a compiler warning */
9598 #endif /* !CONFIG_USER_ONLY */
9600 /* CP1 Branches (before delay slot) */
9601 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9602 int32_t cc
, int32_t offset
)
9604 target_ulong btarget
;
9605 TCGv_i32 t0
= tcg_temp_new_i32();
9607 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9608 generate_exception_end(ctx
, EXCP_RI
);
9613 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
9615 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9619 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9620 tcg_gen_not_i32(t0
, t0
);
9621 tcg_gen_andi_i32(t0
, t0
, 1);
9622 tcg_gen_extu_i32_tl(bcond
, t0
);
9625 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9626 tcg_gen_not_i32(t0
, t0
);
9627 tcg_gen_andi_i32(t0
, t0
, 1);
9628 tcg_gen_extu_i32_tl(bcond
, t0
);
9631 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9632 tcg_gen_andi_i32(t0
, t0
, 1);
9633 tcg_gen_extu_i32_tl(bcond
, t0
);
9636 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9637 tcg_gen_andi_i32(t0
, t0
, 1);
9638 tcg_gen_extu_i32_tl(bcond
, t0
);
9640 ctx
->hflags
|= MIPS_HFLAG_BL
;
9644 TCGv_i32 t1
= tcg_temp_new_i32();
9645 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9646 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9647 tcg_gen_nand_i32(t0
, t0
, t1
);
9648 tcg_temp_free_i32(t1
);
9649 tcg_gen_andi_i32(t0
, t0
, 1);
9650 tcg_gen_extu_i32_tl(bcond
, t0
);
9655 TCGv_i32 t1
= tcg_temp_new_i32();
9656 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9657 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9658 tcg_gen_or_i32(t0
, t0
, t1
);
9659 tcg_temp_free_i32(t1
);
9660 tcg_gen_andi_i32(t0
, t0
, 1);
9661 tcg_gen_extu_i32_tl(bcond
, t0
);
9666 TCGv_i32 t1
= tcg_temp_new_i32();
9667 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9668 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9669 tcg_gen_and_i32(t0
, t0
, t1
);
9670 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
9671 tcg_gen_and_i32(t0
, t0
, t1
);
9672 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
9673 tcg_gen_nand_i32(t0
, t0
, t1
);
9674 tcg_temp_free_i32(t1
);
9675 tcg_gen_andi_i32(t0
, t0
, 1);
9676 tcg_gen_extu_i32_tl(bcond
, t0
);
9681 TCGv_i32 t1
= tcg_temp_new_i32();
9682 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9683 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
9684 tcg_gen_or_i32(t0
, t0
, t1
);
9685 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
9686 tcg_gen_or_i32(t0
, t0
, t1
);
9687 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
9688 tcg_gen_or_i32(t0
, t0
, t1
);
9689 tcg_temp_free_i32(t1
);
9690 tcg_gen_andi_i32(t0
, t0
, 1);
9691 tcg_gen_extu_i32_tl(bcond
, t0
);
9694 ctx
->hflags
|= MIPS_HFLAG_BC
;
9697 MIPS_INVAL("cp1 cond branch");
9698 generate_exception_end(ctx
, EXCP_RI
);
9701 ctx
->btarget
= btarget
;
9702 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9704 tcg_temp_free_i32(t0
);
9707 /* R6 CP1 Branches */
9708 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
9709 int32_t ft
, int32_t offset
,
9712 target_ulong btarget
;
9713 TCGv_i64 t0
= tcg_temp_new_i64();
9715 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
9716 #ifdef MIPS_DEBUG_DISAS
9717 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
9718 "\n", ctx
->base
.pc_next
);
9720 generate_exception_end(ctx
, EXCP_RI
);
9724 gen_load_fpr64(ctx
, t0
, ft
);
9725 tcg_gen_andi_i64(t0
, t0
, 1);
9727 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
9731 tcg_gen_xori_i64(t0
, t0
, 1);
9732 ctx
->hflags
|= MIPS_HFLAG_BC
;
9735 /* t0 already set */
9736 ctx
->hflags
|= MIPS_HFLAG_BC
;
9739 MIPS_INVAL("cp1 cond branch");
9740 generate_exception_end(ctx
, EXCP_RI
);
9744 tcg_gen_trunc_i64_tl(bcond
, t0
);
9746 ctx
->btarget
= btarget
;
9748 switch (delayslot_size
) {
9750 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
9753 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9758 tcg_temp_free_i64(t0
);
9761 /* Coprocessor 1 (FPU) */
9763 #define FOP(func, fmt) (((fmt) << 21) | (func))
9766 OPC_ADD_S
= FOP(0, FMT_S
),
9767 OPC_SUB_S
= FOP(1, FMT_S
),
9768 OPC_MUL_S
= FOP(2, FMT_S
),
9769 OPC_DIV_S
= FOP(3, FMT_S
),
9770 OPC_SQRT_S
= FOP(4, FMT_S
),
9771 OPC_ABS_S
= FOP(5, FMT_S
),
9772 OPC_MOV_S
= FOP(6, FMT_S
),
9773 OPC_NEG_S
= FOP(7, FMT_S
),
9774 OPC_ROUND_L_S
= FOP(8, FMT_S
),
9775 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
9776 OPC_CEIL_L_S
= FOP(10, FMT_S
),
9777 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
9778 OPC_ROUND_W_S
= FOP(12, FMT_S
),
9779 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
9780 OPC_CEIL_W_S
= FOP(14, FMT_S
),
9781 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
9782 OPC_SEL_S
= FOP(16, FMT_S
),
9783 OPC_MOVCF_S
= FOP(17, FMT_S
),
9784 OPC_MOVZ_S
= FOP(18, FMT_S
),
9785 OPC_MOVN_S
= FOP(19, FMT_S
),
9786 OPC_SELEQZ_S
= FOP(20, FMT_S
),
9787 OPC_RECIP_S
= FOP(21, FMT_S
),
9788 OPC_RSQRT_S
= FOP(22, FMT_S
),
9789 OPC_SELNEZ_S
= FOP(23, FMT_S
),
9790 OPC_MADDF_S
= FOP(24, FMT_S
),
9791 OPC_MSUBF_S
= FOP(25, FMT_S
),
9792 OPC_RINT_S
= FOP(26, FMT_S
),
9793 OPC_CLASS_S
= FOP(27, FMT_S
),
9794 OPC_MIN_S
= FOP(28, FMT_S
),
9795 OPC_RECIP2_S
= FOP(28, FMT_S
),
9796 OPC_MINA_S
= FOP(29, FMT_S
),
9797 OPC_RECIP1_S
= FOP(29, FMT_S
),
9798 OPC_MAX_S
= FOP(30, FMT_S
),
9799 OPC_RSQRT1_S
= FOP(30, FMT_S
),
9800 OPC_MAXA_S
= FOP(31, FMT_S
),
9801 OPC_RSQRT2_S
= FOP(31, FMT_S
),
9802 OPC_CVT_D_S
= FOP(33, FMT_S
),
9803 OPC_CVT_W_S
= FOP(36, FMT_S
),
9804 OPC_CVT_L_S
= FOP(37, FMT_S
),
9805 OPC_CVT_PS_S
= FOP(38, FMT_S
),
9806 OPC_CMP_F_S
= FOP (48, FMT_S
),
9807 OPC_CMP_UN_S
= FOP (49, FMT_S
),
9808 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
9809 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
9810 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
9811 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
9812 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
9813 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
9814 OPC_CMP_SF_S
= FOP (56, FMT_S
),
9815 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
9816 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
9817 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
9818 OPC_CMP_LT_S
= FOP (60, FMT_S
),
9819 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
9820 OPC_CMP_LE_S
= FOP (62, FMT_S
),
9821 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
9823 OPC_ADD_D
= FOP(0, FMT_D
),
9824 OPC_SUB_D
= FOP(1, FMT_D
),
9825 OPC_MUL_D
= FOP(2, FMT_D
),
9826 OPC_DIV_D
= FOP(3, FMT_D
),
9827 OPC_SQRT_D
= FOP(4, FMT_D
),
9828 OPC_ABS_D
= FOP(5, FMT_D
),
9829 OPC_MOV_D
= FOP(6, FMT_D
),
9830 OPC_NEG_D
= FOP(7, FMT_D
),
9831 OPC_ROUND_L_D
= FOP(8, FMT_D
),
9832 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
9833 OPC_CEIL_L_D
= FOP(10, FMT_D
),
9834 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
9835 OPC_ROUND_W_D
= FOP(12, FMT_D
),
9836 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
9837 OPC_CEIL_W_D
= FOP(14, FMT_D
),
9838 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
9839 OPC_SEL_D
= FOP(16, FMT_D
),
9840 OPC_MOVCF_D
= FOP(17, FMT_D
),
9841 OPC_MOVZ_D
= FOP(18, FMT_D
),
9842 OPC_MOVN_D
= FOP(19, FMT_D
),
9843 OPC_SELEQZ_D
= FOP(20, FMT_D
),
9844 OPC_RECIP_D
= FOP(21, FMT_D
),
9845 OPC_RSQRT_D
= FOP(22, FMT_D
),
9846 OPC_SELNEZ_D
= FOP(23, FMT_D
),
9847 OPC_MADDF_D
= FOP(24, FMT_D
),
9848 OPC_MSUBF_D
= FOP(25, FMT_D
),
9849 OPC_RINT_D
= FOP(26, FMT_D
),
9850 OPC_CLASS_D
= FOP(27, FMT_D
),
9851 OPC_MIN_D
= FOP(28, FMT_D
),
9852 OPC_RECIP2_D
= FOP(28, FMT_D
),
9853 OPC_MINA_D
= FOP(29, FMT_D
),
9854 OPC_RECIP1_D
= FOP(29, FMT_D
),
9855 OPC_MAX_D
= FOP(30, FMT_D
),
9856 OPC_RSQRT1_D
= FOP(30, FMT_D
),
9857 OPC_MAXA_D
= FOP(31, FMT_D
),
9858 OPC_RSQRT2_D
= FOP(31, FMT_D
),
9859 OPC_CVT_S_D
= FOP(32, FMT_D
),
9860 OPC_CVT_W_D
= FOP(36, FMT_D
),
9861 OPC_CVT_L_D
= FOP(37, FMT_D
),
9862 OPC_CMP_F_D
= FOP (48, FMT_D
),
9863 OPC_CMP_UN_D
= FOP (49, FMT_D
),
9864 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
9865 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
9866 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
9867 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
9868 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
9869 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
9870 OPC_CMP_SF_D
= FOP (56, FMT_D
),
9871 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
9872 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
9873 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
9874 OPC_CMP_LT_D
= FOP (60, FMT_D
),
9875 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
9876 OPC_CMP_LE_D
= FOP (62, FMT_D
),
9877 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
9879 OPC_CVT_S_W
= FOP(32, FMT_W
),
9880 OPC_CVT_D_W
= FOP(33, FMT_W
),
9881 OPC_CVT_S_L
= FOP(32, FMT_L
),
9882 OPC_CVT_D_L
= FOP(33, FMT_L
),
9883 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
9885 OPC_ADD_PS
= FOP(0, FMT_PS
),
9886 OPC_SUB_PS
= FOP(1, FMT_PS
),
9887 OPC_MUL_PS
= FOP(2, FMT_PS
),
9888 OPC_DIV_PS
= FOP(3, FMT_PS
),
9889 OPC_ABS_PS
= FOP(5, FMT_PS
),
9890 OPC_MOV_PS
= FOP(6, FMT_PS
),
9891 OPC_NEG_PS
= FOP(7, FMT_PS
),
9892 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
9893 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
9894 OPC_MOVN_PS
= FOP(19, FMT_PS
),
9895 OPC_ADDR_PS
= FOP(24, FMT_PS
),
9896 OPC_MULR_PS
= FOP(26, FMT_PS
),
9897 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
9898 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
9899 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
9900 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
9902 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
9903 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
9904 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
9905 OPC_PLL_PS
= FOP(44, FMT_PS
),
9906 OPC_PLU_PS
= FOP(45, FMT_PS
),
9907 OPC_PUL_PS
= FOP(46, FMT_PS
),
9908 OPC_PUU_PS
= FOP(47, FMT_PS
),
9909 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
9910 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
9911 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
9912 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
9913 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
9914 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
9915 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
9916 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
9917 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
9918 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
9919 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
9920 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
9921 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
9922 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
9923 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
9924 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
9928 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
9929 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
9930 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
9931 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
9932 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
9933 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
9934 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
9935 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
9936 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
9937 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
9938 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
9939 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
9940 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
9941 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
9942 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
9943 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
9944 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
9945 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
9946 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
9947 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
9948 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
9949 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
9951 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
9952 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
9953 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
9954 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
9955 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
9956 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
9957 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
9958 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
9959 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
9960 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
9961 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
9962 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
9963 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
9964 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
9965 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
9966 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
9967 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
9968 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
9969 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
9970 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
9971 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
9972 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
9974 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
9976 TCGv t0
= tcg_temp_new();
9981 TCGv_i32 fp0
= tcg_temp_new_i32();
9983 gen_load_fpr32(ctx
, fp0
, fs
);
9984 tcg_gen_ext_i32_tl(t0
, fp0
);
9985 tcg_temp_free_i32(fp0
);
9987 gen_store_gpr(t0
, rt
);
9990 gen_load_gpr(t0
, rt
);
9992 TCGv_i32 fp0
= tcg_temp_new_i32();
9994 tcg_gen_trunc_tl_i32(fp0
, t0
);
9995 gen_store_fpr32(ctx
, fp0
, fs
);
9996 tcg_temp_free_i32(fp0
);
10000 gen_helper_1e0i(cfc1
, t0
, fs
);
10001 gen_store_gpr(t0
, rt
);
10004 gen_load_gpr(t0
, rt
);
10005 save_cpu_state(ctx
, 0);
10007 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10009 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10010 tcg_temp_free_i32(fs_tmp
);
10012 /* Stop translation as we may have changed hflags */
10013 ctx
->base
.is_jmp
= DISAS_STOP
;
10015 #if defined(TARGET_MIPS64)
10017 gen_load_fpr64(ctx
, t0
, fs
);
10018 gen_store_gpr(t0
, rt
);
10021 gen_load_gpr(t0
, rt
);
10022 gen_store_fpr64(ctx
, t0
, fs
);
10027 TCGv_i32 fp0
= tcg_temp_new_i32();
10029 gen_load_fpr32h(ctx
, fp0
, fs
);
10030 tcg_gen_ext_i32_tl(t0
, fp0
);
10031 tcg_temp_free_i32(fp0
);
10033 gen_store_gpr(t0
, rt
);
10036 gen_load_gpr(t0
, rt
);
10038 TCGv_i32 fp0
= tcg_temp_new_i32();
10040 tcg_gen_trunc_tl_i32(fp0
, t0
);
10041 gen_store_fpr32h(ctx
, fp0
, fs
);
10042 tcg_temp_free_i32(fp0
);
10046 MIPS_INVAL("cp1 move");
10047 generate_exception_end(ctx
, EXCP_RI
);
10055 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10062 /* Treat as NOP. */
10067 cond
= TCG_COND_EQ
;
10069 cond
= TCG_COND_NE
;
10071 l1
= gen_new_label();
10072 t0
= tcg_temp_new_i32();
10073 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10074 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10075 tcg_temp_free_i32(t0
);
10077 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10079 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10084 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10088 TCGv_i32 t0
= tcg_temp_new_i32();
10089 TCGLabel
*l1
= gen_new_label();
10092 cond
= TCG_COND_EQ
;
10094 cond
= TCG_COND_NE
;
10096 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10097 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10098 gen_load_fpr32(ctx
, t0
, fs
);
10099 gen_store_fpr32(ctx
, t0
, fd
);
10101 tcg_temp_free_i32(t0
);
10104 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10107 TCGv_i32 t0
= tcg_temp_new_i32();
10109 TCGLabel
*l1
= gen_new_label();
10112 cond
= TCG_COND_EQ
;
10114 cond
= TCG_COND_NE
;
10116 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10117 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10118 tcg_temp_free_i32(t0
);
10119 fp0
= tcg_temp_new_i64();
10120 gen_load_fpr64(ctx
, fp0
, fs
);
10121 gen_store_fpr64(ctx
, fp0
, fd
);
10122 tcg_temp_free_i64(fp0
);
10126 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10130 TCGv_i32 t0
= tcg_temp_new_i32();
10131 TCGLabel
*l1
= gen_new_label();
10132 TCGLabel
*l2
= gen_new_label();
10135 cond
= TCG_COND_EQ
;
10137 cond
= TCG_COND_NE
;
10139 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10140 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10141 gen_load_fpr32(ctx
, t0
, fs
);
10142 gen_store_fpr32(ctx
, t0
, fd
);
10145 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10146 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10147 gen_load_fpr32h(ctx
, t0
, fs
);
10148 gen_store_fpr32h(ctx
, t0
, fd
);
10149 tcg_temp_free_i32(t0
);
10153 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10156 TCGv_i32 t1
= tcg_const_i32(0);
10157 TCGv_i32 fp0
= tcg_temp_new_i32();
10158 TCGv_i32 fp1
= tcg_temp_new_i32();
10159 TCGv_i32 fp2
= tcg_temp_new_i32();
10160 gen_load_fpr32(ctx
, fp0
, fd
);
10161 gen_load_fpr32(ctx
, fp1
, ft
);
10162 gen_load_fpr32(ctx
, fp2
, fs
);
10166 tcg_gen_andi_i32(fp0
, fp0
, 1);
10167 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10170 tcg_gen_andi_i32(fp1
, fp1
, 1);
10171 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10174 tcg_gen_andi_i32(fp1
, fp1
, 1);
10175 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10178 MIPS_INVAL("gen_sel_s");
10179 generate_exception_end(ctx
, EXCP_RI
);
10183 gen_store_fpr32(ctx
, fp0
, fd
);
10184 tcg_temp_free_i32(fp2
);
10185 tcg_temp_free_i32(fp1
);
10186 tcg_temp_free_i32(fp0
);
10187 tcg_temp_free_i32(t1
);
10190 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10193 TCGv_i64 t1
= tcg_const_i64(0);
10194 TCGv_i64 fp0
= tcg_temp_new_i64();
10195 TCGv_i64 fp1
= tcg_temp_new_i64();
10196 TCGv_i64 fp2
= tcg_temp_new_i64();
10197 gen_load_fpr64(ctx
, fp0
, fd
);
10198 gen_load_fpr64(ctx
, fp1
, ft
);
10199 gen_load_fpr64(ctx
, fp2
, fs
);
10203 tcg_gen_andi_i64(fp0
, fp0
, 1);
10204 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10207 tcg_gen_andi_i64(fp1
, fp1
, 1);
10208 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10211 tcg_gen_andi_i64(fp1
, fp1
, 1);
10212 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10215 MIPS_INVAL("gen_sel_d");
10216 generate_exception_end(ctx
, EXCP_RI
);
10220 gen_store_fpr64(ctx
, fp0
, fd
);
10221 tcg_temp_free_i64(fp2
);
10222 tcg_temp_free_i64(fp1
);
10223 tcg_temp_free_i64(fp0
);
10224 tcg_temp_free_i64(t1
);
10227 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10228 int ft
, int fs
, int fd
, int cc
)
10230 uint32_t func
= ctx
->opcode
& 0x3f;
10234 TCGv_i32 fp0
= tcg_temp_new_i32();
10235 TCGv_i32 fp1
= tcg_temp_new_i32();
10237 gen_load_fpr32(ctx
, fp0
, fs
);
10238 gen_load_fpr32(ctx
, fp1
, ft
);
10239 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10240 tcg_temp_free_i32(fp1
);
10241 gen_store_fpr32(ctx
, fp0
, fd
);
10242 tcg_temp_free_i32(fp0
);
10247 TCGv_i32 fp0
= tcg_temp_new_i32();
10248 TCGv_i32 fp1
= tcg_temp_new_i32();
10250 gen_load_fpr32(ctx
, fp0
, fs
);
10251 gen_load_fpr32(ctx
, fp1
, ft
);
10252 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10253 tcg_temp_free_i32(fp1
);
10254 gen_store_fpr32(ctx
, fp0
, fd
);
10255 tcg_temp_free_i32(fp0
);
10260 TCGv_i32 fp0
= tcg_temp_new_i32();
10261 TCGv_i32 fp1
= tcg_temp_new_i32();
10263 gen_load_fpr32(ctx
, fp0
, fs
);
10264 gen_load_fpr32(ctx
, fp1
, ft
);
10265 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10266 tcg_temp_free_i32(fp1
);
10267 gen_store_fpr32(ctx
, fp0
, fd
);
10268 tcg_temp_free_i32(fp0
);
10273 TCGv_i32 fp0
= tcg_temp_new_i32();
10274 TCGv_i32 fp1
= tcg_temp_new_i32();
10276 gen_load_fpr32(ctx
, fp0
, fs
);
10277 gen_load_fpr32(ctx
, fp1
, ft
);
10278 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10279 tcg_temp_free_i32(fp1
);
10280 gen_store_fpr32(ctx
, fp0
, fd
);
10281 tcg_temp_free_i32(fp0
);
10286 TCGv_i32 fp0
= tcg_temp_new_i32();
10288 gen_load_fpr32(ctx
, fp0
, fs
);
10289 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10290 gen_store_fpr32(ctx
, fp0
, fd
);
10291 tcg_temp_free_i32(fp0
);
10296 TCGv_i32 fp0
= tcg_temp_new_i32();
10298 gen_load_fpr32(ctx
, fp0
, fs
);
10299 if (ctx
->abs2008
) {
10300 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10302 gen_helper_float_abs_s(fp0
, fp0
);
10304 gen_store_fpr32(ctx
, fp0
, fd
);
10305 tcg_temp_free_i32(fp0
);
10310 TCGv_i32 fp0
= tcg_temp_new_i32();
10312 gen_load_fpr32(ctx
, fp0
, fs
);
10313 gen_store_fpr32(ctx
, fp0
, fd
);
10314 tcg_temp_free_i32(fp0
);
10319 TCGv_i32 fp0
= tcg_temp_new_i32();
10321 gen_load_fpr32(ctx
, fp0
, fs
);
10322 if (ctx
->abs2008
) {
10323 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10325 gen_helper_float_chs_s(fp0
, fp0
);
10327 gen_store_fpr32(ctx
, fp0
, fd
);
10328 tcg_temp_free_i32(fp0
);
10331 case OPC_ROUND_L_S
:
10332 check_cp1_64bitmode(ctx
);
10334 TCGv_i32 fp32
= tcg_temp_new_i32();
10335 TCGv_i64 fp64
= tcg_temp_new_i64();
10337 gen_load_fpr32(ctx
, fp32
, fs
);
10338 if (ctx
->nan2008
) {
10339 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10341 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10343 tcg_temp_free_i32(fp32
);
10344 gen_store_fpr64(ctx
, fp64
, fd
);
10345 tcg_temp_free_i64(fp64
);
10348 case OPC_TRUNC_L_S
:
10349 check_cp1_64bitmode(ctx
);
10351 TCGv_i32 fp32
= tcg_temp_new_i32();
10352 TCGv_i64 fp64
= tcg_temp_new_i64();
10354 gen_load_fpr32(ctx
, fp32
, fs
);
10355 if (ctx
->nan2008
) {
10356 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10358 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10360 tcg_temp_free_i32(fp32
);
10361 gen_store_fpr64(ctx
, fp64
, fd
);
10362 tcg_temp_free_i64(fp64
);
10366 check_cp1_64bitmode(ctx
);
10368 TCGv_i32 fp32
= tcg_temp_new_i32();
10369 TCGv_i64 fp64
= tcg_temp_new_i64();
10371 gen_load_fpr32(ctx
, fp32
, fs
);
10372 if (ctx
->nan2008
) {
10373 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10375 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10377 tcg_temp_free_i32(fp32
);
10378 gen_store_fpr64(ctx
, fp64
, fd
);
10379 tcg_temp_free_i64(fp64
);
10382 case OPC_FLOOR_L_S
:
10383 check_cp1_64bitmode(ctx
);
10385 TCGv_i32 fp32
= tcg_temp_new_i32();
10386 TCGv_i64 fp64
= tcg_temp_new_i64();
10388 gen_load_fpr32(ctx
, fp32
, fs
);
10389 if (ctx
->nan2008
) {
10390 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10392 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10394 tcg_temp_free_i32(fp32
);
10395 gen_store_fpr64(ctx
, fp64
, fd
);
10396 tcg_temp_free_i64(fp64
);
10399 case OPC_ROUND_W_S
:
10401 TCGv_i32 fp0
= tcg_temp_new_i32();
10403 gen_load_fpr32(ctx
, fp0
, fs
);
10404 if (ctx
->nan2008
) {
10405 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10407 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10409 gen_store_fpr32(ctx
, fp0
, fd
);
10410 tcg_temp_free_i32(fp0
);
10413 case OPC_TRUNC_W_S
:
10415 TCGv_i32 fp0
= tcg_temp_new_i32();
10417 gen_load_fpr32(ctx
, fp0
, fs
);
10418 if (ctx
->nan2008
) {
10419 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10421 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10423 gen_store_fpr32(ctx
, fp0
, fd
);
10424 tcg_temp_free_i32(fp0
);
10429 TCGv_i32 fp0
= tcg_temp_new_i32();
10431 gen_load_fpr32(ctx
, fp0
, fs
);
10432 if (ctx
->nan2008
) {
10433 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10435 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10437 gen_store_fpr32(ctx
, fp0
, fd
);
10438 tcg_temp_free_i32(fp0
);
10441 case OPC_FLOOR_W_S
:
10443 TCGv_i32 fp0
= tcg_temp_new_i32();
10445 gen_load_fpr32(ctx
, fp0
, fs
);
10446 if (ctx
->nan2008
) {
10447 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10449 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10451 gen_store_fpr32(ctx
, fp0
, fd
);
10452 tcg_temp_free_i32(fp0
);
10456 check_insn(ctx
, ISA_MIPS32R6
);
10457 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10460 check_insn(ctx
, ISA_MIPS32R6
);
10461 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10464 check_insn(ctx
, ISA_MIPS32R6
);
10465 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10469 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10472 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10474 TCGLabel
*l1
= gen_new_label();
10478 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10480 fp0
= tcg_temp_new_i32();
10481 gen_load_fpr32(ctx
, fp0
, fs
);
10482 gen_store_fpr32(ctx
, fp0
, fd
);
10483 tcg_temp_free_i32(fp0
);
10488 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10490 TCGLabel
*l1
= gen_new_label();
10494 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10495 fp0
= tcg_temp_new_i32();
10496 gen_load_fpr32(ctx
, fp0
, fs
);
10497 gen_store_fpr32(ctx
, fp0
, fd
);
10498 tcg_temp_free_i32(fp0
);
10505 TCGv_i32 fp0
= tcg_temp_new_i32();
10507 gen_load_fpr32(ctx
, fp0
, fs
);
10508 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10509 gen_store_fpr32(ctx
, fp0
, fd
);
10510 tcg_temp_free_i32(fp0
);
10515 TCGv_i32 fp0
= tcg_temp_new_i32();
10517 gen_load_fpr32(ctx
, fp0
, fs
);
10518 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10519 gen_store_fpr32(ctx
, fp0
, fd
);
10520 tcg_temp_free_i32(fp0
);
10524 check_insn(ctx
, ISA_MIPS32R6
);
10526 TCGv_i32 fp0
= tcg_temp_new_i32();
10527 TCGv_i32 fp1
= tcg_temp_new_i32();
10528 TCGv_i32 fp2
= tcg_temp_new_i32();
10529 gen_load_fpr32(ctx
, fp0
, fs
);
10530 gen_load_fpr32(ctx
, fp1
, ft
);
10531 gen_load_fpr32(ctx
, fp2
, fd
);
10532 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10533 gen_store_fpr32(ctx
, fp2
, fd
);
10534 tcg_temp_free_i32(fp2
);
10535 tcg_temp_free_i32(fp1
);
10536 tcg_temp_free_i32(fp0
);
10540 check_insn(ctx
, ISA_MIPS32R6
);
10542 TCGv_i32 fp0
= tcg_temp_new_i32();
10543 TCGv_i32 fp1
= tcg_temp_new_i32();
10544 TCGv_i32 fp2
= tcg_temp_new_i32();
10545 gen_load_fpr32(ctx
, fp0
, fs
);
10546 gen_load_fpr32(ctx
, fp1
, ft
);
10547 gen_load_fpr32(ctx
, fp2
, fd
);
10548 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10549 gen_store_fpr32(ctx
, fp2
, fd
);
10550 tcg_temp_free_i32(fp2
);
10551 tcg_temp_free_i32(fp1
);
10552 tcg_temp_free_i32(fp0
);
10556 check_insn(ctx
, ISA_MIPS32R6
);
10558 TCGv_i32 fp0
= tcg_temp_new_i32();
10559 gen_load_fpr32(ctx
, fp0
, fs
);
10560 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10561 gen_store_fpr32(ctx
, fp0
, fd
);
10562 tcg_temp_free_i32(fp0
);
10566 check_insn(ctx
, ISA_MIPS32R6
);
10568 TCGv_i32 fp0
= tcg_temp_new_i32();
10569 gen_load_fpr32(ctx
, fp0
, fs
);
10570 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10571 gen_store_fpr32(ctx
, fp0
, fd
);
10572 tcg_temp_free_i32(fp0
);
10575 case OPC_MIN_S
: /* OPC_RECIP2_S */
10576 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10578 TCGv_i32 fp0
= tcg_temp_new_i32();
10579 TCGv_i32 fp1
= tcg_temp_new_i32();
10580 TCGv_i32 fp2
= tcg_temp_new_i32();
10581 gen_load_fpr32(ctx
, fp0
, fs
);
10582 gen_load_fpr32(ctx
, fp1
, ft
);
10583 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10584 gen_store_fpr32(ctx
, fp2
, fd
);
10585 tcg_temp_free_i32(fp2
);
10586 tcg_temp_free_i32(fp1
);
10587 tcg_temp_free_i32(fp0
);
10590 check_cp1_64bitmode(ctx
);
10592 TCGv_i32 fp0
= tcg_temp_new_i32();
10593 TCGv_i32 fp1
= tcg_temp_new_i32();
10595 gen_load_fpr32(ctx
, fp0
, fs
);
10596 gen_load_fpr32(ctx
, fp1
, ft
);
10597 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10598 tcg_temp_free_i32(fp1
);
10599 gen_store_fpr32(ctx
, fp0
, fd
);
10600 tcg_temp_free_i32(fp0
);
10604 case OPC_MINA_S
: /* OPC_RECIP1_S */
10605 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10607 TCGv_i32 fp0
= tcg_temp_new_i32();
10608 TCGv_i32 fp1
= tcg_temp_new_i32();
10609 TCGv_i32 fp2
= tcg_temp_new_i32();
10610 gen_load_fpr32(ctx
, fp0
, fs
);
10611 gen_load_fpr32(ctx
, fp1
, ft
);
10612 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10613 gen_store_fpr32(ctx
, fp2
, fd
);
10614 tcg_temp_free_i32(fp2
);
10615 tcg_temp_free_i32(fp1
);
10616 tcg_temp_free_i32(fp0
);
10619 check_cp1_64bitmode(ctx
);
10621 TCGv_i32 fp0
= tcg_temp_new_i32();
10623 gen_load_fpr32(ctx
, fp0
, fs
);
10624 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10625 gen_store_fpr32(ctx
, fp0
, fd
);
10626 tcg_temp_free_i32(fp0
);
10630 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10631 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10633 TCGv_i32 fp0
= tcg_temp_new_i32();
10634 TCGv_i32 fp1
= tcg_temp_new_i32();
10635 gen_load_fpr32(ctx
, fp0
, fs
);
10636 gen_load_fpr32(ctx
, fp1
, ft
);
10637 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10638 gen_store_fpr32(ctx
, fp1
, fd
);
10639 tcg_temp_free_i32(fp1
);
10640 tcg_temp_free_i32(fp0
);
10643 check_cp1_64bitmode(ctx
);
10645 TCGv_i32 fp0
= tcg_temp_new_i32();
10647 gen_load_fpr32(ctx
, fp0
, fs
);
10648 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10649 gen_store_fpr32(ctx
, fp0
, fd
);
10650 tcg_temp_free_i32(fp0
);
10654 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10655 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10657 TCGv_i32 fp0
= tcg_temp_new_i32();
10658 TCGv_i32 fp1
= tcg_temp_new_i32();
10659 gen_load_fpr32(ctx
, fp0
, fs
);
10660 gen_load_fpr32(ctx
, fp1
, ft
);
10661 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10662 gen_store_fpr32(ctx
, fp1
, fd
);
10663 tcg_temp_free_i32(fp1
);
10664 tcg_temp_free_i32(fp0
);
10667 check_cp1_64bitmode(ctx
);
10669 TCGv_i32 fp0
= tcg_temp_new_i32();
10670 TCGv_i32 fp1
= tcg_temp_new_i32();
10672 gen_load_fpr32(ctx
, fp0
, fs
);
10673 gen_load_fpr32(ctx
, fp1
, ft
);
10674 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10675 tcg_temp_free_i32(fp1
);
10676 gen_store_fpr32(ctx
, fp0
, fd
);
10677 tcg_temp_free_i32(fp0
);
10682 check_cp1_registers(ctx
, fd
);
10684 TCGv_i32 fp32
= tcg_temp_new_i32();
10685 TCGv_i64 fp64
= tcg_temp_new_i64();
10687 gen_load_fpr32(ctx
, fp32
, fs
);
10688 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
10689 tcg_temp_free_i32(fp32
);
10690 gen_store_fpr64(ctx
, fp64
, fd
);
10691 tcg_temp_free_i64(fp64
);
10696 TCGv_i32 fp0
= tcg_temp_new_i32();
10698 gen_load_fpr32(ctx
, fp0
, fs
);
10699 if (ctx
->nan2008
) {
10700 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
10702 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
10704 gen_store_fpr32(ctx
, fp0
, fd
);
10705 tcg_temp_free_i32(fp0
);
10709 check_cp1_64bitmode(ctx
);
10711 TCGv_i32 fp32
= tcg_temp_new_i32();
10712 TCGv_i64 fp64
= tcg_temp_new_i64();
10714 gen_load_fpr32(ctx
, fp32
, fs
);
10715 if (ctx
->nan2008
) {
10716 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
10718 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
10720 tcg_temp_free_i32(fp32
);
10721 gen_store_fpr64(ctx
, fp64
, fd
);
10722 tcg_temp_free_i64(fp64
);
10728 TCGv_i64 fp64
= tcg_temp_new_i64();
10729 TCGv_i32 fp32_0
= tcg_temp_new_i32();
10730 TCGv_i32 fp32_1
= tcg_temp_new_i32();
10732 gen_load_fpr32(ctx
, fp32_0
, fs
);
10733 gen_load_fpr32(ctx
, fp32_1
, ft
);
10734 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
10735 tcg_temp_free_i32(fp32_1
);
10736 tcg_temp_free_i32(fp32_0
);
10737 gen_store_fpr64(ctx
, fp64
, fd
);
10738 tcg_temp_free_i64(fp64
);
10744 case OPC_CMP_UEQ_S
:
10745 case OPC_CMP_OLT_S
:
10746 case OPC_CMP_ULT_S
:
10747 case OPC_CMP_OLE_S
:
10748 case OPC_CMP_ULE_S
:
10750 case OPC_CMP_NGLE_S
:
10751 case OPC_CMP_SEQ_S
:
10752 case OPC_CMP_NGL_S
:
10754 case OPC_CMP_NGE_S
:
10756 case OPC_CMP_NGT_S
:
10757 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10758 if (ctx
->opcode
& (1 << 6)) {
10759 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
10761 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
10765 check_cp1_registers(ctx
, fs
| ft
| fd
);
10767 TCGv_i64 fp0
= tcg_temp_new_i64();
10768 TCGv_i64 fp1
= tcg_temp_new_i64();
10770 gen_load_fpr64(ctx
, fp0
, fs
);
10771 gen_load_fpr64(ctx
, fp1
, ft
);
10772 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
10773 tcg_temp_free_i64(fp1
);
10774 gen_store_fpr64(ctx
, fp0
, fd
);
10775 tcg_temp_free_i64(fp0
);
10779 check_cp1_registers(ctx
, fs
| ft
| fd
);
10781 TCGv_i64 fp0
= tcg_temp_new_i64();
10782 TCGv_i64 fp1
= tcg_temp_new_i64();
10784 gen_load_fpr64(ctx
, fp0
, fs
);
10785 gen_load_fpr64(ctx
, fp1
, ft
);
10786 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
10787 tcg_temp_free_i64(fp1
);
10788 gen_store_fpr64(ctx
, fp0
, fd
);
10789 tcg_temp_free_i64(fp0
);
10793 check_cp1_registers(ctx
, fs
| ft
| fd
);
10795 TCGv_i64 fp0
= tcg_temp_new_i64();
10796 TCGv_i64 fp1
= tcg_temp_new_i64();
10798 gen_load_fpr64(ctx
, fp0
, fs
);
10799 gen_load_fpr64(ctx
, fp1
, ft
);
10800 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
10801 tcg_temp_free_i64(fp1
);
10802 gen_store_fpr64(ctx
, fp0
, fd
);
10803 tcg_temp_free_i64(fp0
);
10807 check_cp1_registers(ctx
, fs
| ft
| fd
);
10809 TCGv_i64 fp0
= tcg_temp_new_i64();
10810 TCGv_i64 fp1
= tcg_temp_new_i64();
10812 gen_load_fpr64(ctx
, fp0
, fs
);
10813 gen_load_fpr64(ctx
, fp1
, ft
);
10814 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
10815 tcg_temp_free_i64(fp1
);
10816 gen_store_fpr64(ctx
, fp0
, fd
);
10817 tcg_temp_free_i64(fp0
);
10821 check_cp1_registers(ctx
, fs
| fd
);
10823 TCGv_i64 fp0
= tcg_temp_new_i64();
10825 gen_load_fpr64(ctx
, fp0
, fs
);
10826 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
10827 gen_store_fpr64(ctx
, fp0
, fd
);
10828 tcg_temp_free_i64(fp0
);
10832 check_cp1_registers(ctx
, fs
| fd
);
10834 TCGv_i64 fp0
= tcg_temp_new_i64();
10836 gen_load_fpr64(ctx
, fp0
, fs
);
10837 if (ctx
->abs2008
) {
10838 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
10840 gen_helper_float_abs_d(fp0
, fp0
);
10842 gen_store_fpr64(ctx
, fp0
, fd
);
10843 tcg_temp_free_i64(fp0
);
10847 check_cp1_registers(ctx
, fs
| fd
);
10849 TCGv_i64 fp0
= tcg_temp_new_i64();
10851 gen_load_fpr64(ctx
, fp0
, fs
);
10852 gen_store_fpr64(ctx
, fp0
, fd
);
10853 tcg_temp_free_i64(fp0
);
10857 check_cp1_registers(ctx
, fs
| fd
);
10859 TCGv_i64 fp0
= tcg_temp_new_i64();
10861 gen_load_fpr64(ctx
, fp0
, fs
);
10862 if (ctx
->abs2008
) {
10863 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10865 gen_helper_float_chs_d(fp0
, fp0
);
10867 gen_store_fpr64(ctx
, fp0
, fd
);
10868 tcg_temp_free_i64(fp0
);
10871 case OPC_ROUND_L_D
:
10872 check_cp1_64bitmode(ctx
);
10874 TCGv_i64 fp0
= tcg_temp_new_i64();
10876 gen_load_fpr64(ctx
, fp0
, fs
);
10877 if (ctx
->nan2008
) {
10878 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10880 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10882 gen_store_fpr64(ctx
, fp0
, fd
);
10883 tcg_temp_free_i64(fp0
);
10886 case OPC_TRUNC_L_D
:
10887 check_cp1_64bitmode(ctx
);
10889 TCGv_i64 fp0
= tcg_temp_new_i64();
10891 gen_load_fpr64(ctx
, fp0
, fs
);
10892 if (ctx
->nan2008
) {
10893 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10895 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10897 gen_store_fpr64(ctx
, fp0
, fd
);
10898 tcg_temp_free_i64(fp0
);
10902 check_cp1_64bitmode(ctx
);
10904 TCGv_i64 fp0
= tcg_temp_new_i64();
10906 gen_load_fpr64(ctx
, fp0
, fs
);
10907 if (ctx
->nan2008
) {
10908 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10910 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10912 gen_store_fpr64(ctx
, fp0
, fd
);
10913 tcg_temp_free_i64(fp0
);
10916 case OPC_FLOOR_L_D
:
10917 check_cp1_64bitmode(ctx
);
10919 TCGv_i64 fp0
= tcg_temp_new_i64();
10921 gen_load_fpr64(ctx
, fp0
, fs
);
10922 if (ctx
->nan2008
) {
10923 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10925 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10927 gen_store_fpr64(ctx
, fp0
, fd
);
10928 tcg_temp_free_i64(fp0
);
10931 case OPC_ROUND_W_D
:
10932 check_cp1_registers(ctx
, fs
);
10934 TCGv_i32 fp32
= tcg_temp_new_i32();
10935 TCGv_i64 fp64
= tcg_temp_new_i64();
10937 gen_load_fpr64(ctx
, fp64
, fs
);
10938 if (ctx
->nan2008
) {
10939 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10941 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10943 tcg_temp_free_i64(fp64
);
10944 gen_store_fpr32(ctx
, fp32
, fd
);
10945 tcg_temp_free_i32(fp32
);
10948 case OPC_TRUNC_W_D
:
10949 check_cp1_registers(ctx
, fs
);
10951 TCGv_i32 fp32
= tcg_temp_new_i32();
10952 TCGv_i64 fp64
= tcg_temp_new_i64();
10954 gen_load_fpr64(ctx
, fp64
, fs
);
10955 if (ctx
->nan2008
) {
10956 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10958 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10960 tcg_temp_free_i64(fp64
);
10961 gen_store_fpr32(ctx
, fp32
, fd
);
10962 tcg_temp_free_i32(fp32
);
10966 check_cp1_registers(ctx
, fs
);
10968 TCGv_i32 fp32
= tcg_temp_new_i32();
10969 TCGv_i64 fp64
= tcg_temp_new_i64();
10971 gen_load_fpr64(ctx
, fp64
, fs
);
10972 if (ctx
->nan2008
) {
10973 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10975 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10977 tcg_temp_free_i64(fp64
);
10978 gen_store_fpr32(ctx
, fp32
, fd
);
10979 tcg_temp_free_i32(fp32
);
10982 case OPC_FLOOR_W_D
:
10983 check_cp1_registers(ctx
, fs
);
10985 TCGv_i32 fp32
= tcg_temp_new_i32();
10986 TCGv_i64 fp64
= tcg_temp_new_i64();
10988 gen_load_fpr64(ctx
, fp64
, fs
);
10989 if (ctx
->nan2008
) {
10990 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10992 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10994 tcg_temp_free_i64(fp64
);
10995 gen_store_fpr32(ctx
, fp32
, fd
);
10996 tcg_temp_free_i32(fp32
);
11000 check_insn(ctx
, ISA_MIPS32R6
);
11001 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11004 check_insn(ctx
, ISA_MIPS32R6
);
11005 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11008 check_insn(ctx
, ISA_MIPS32R6
);
11009 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11012 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11013 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11016 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11018 TCGLabel
*l1
= gen_new_label();
11022 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11024 fp0
= tcg_temp_new_i64();
11025 gen_load_fpr64(ctx
, fp0
, fs
);
11026 gen_store_fpr64(ctx
, fp0
, fd
);
11027 tcg_temp_free_i64(fp0
);
11032 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11034 TCGLabel
*l1
= gen_new_label();
11038 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11039 fp0
= tcg_temp_new_i64();
11040 gen_load_fpr64(ctx
, fp0
, fs
);
11041 gen_store_fpr64(ctx
, fp0
, fd
);
11042 tcg_temp_free_i64(fp0
);
11048 check_cp1_registers(ctx
, fs
| fd
);
11050 TCGv_i64 fp0
= tcg_temp_new_i64();
11052 gen_load_fpr64(ctx
, fp0
, fs
);
11053 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11054 gen_store_fpr64(ctx
, fp0
, fd
);
11055 tcg_temp_free_i64(fp0
);
11059 check_cp1_registers(ctx
, fs
| fd
);
11061 TCGv_i64 fp0
= tcg_temp_new_i64();
11063 gen_load_fpr64(ctx
, fp0
, fs
);
11064 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11065 gen_store_fpr64(ctx
, fp0
, fd
);
11066 tcg_temp_free_i64(fp0
);
11070 check_insn(ctx
, ISA_MIPS32R6
);
11072 TCGv_i64 fp0
= tcg_temp_new_i64();
11073 TCGv_i64 fp1
= tcg_temp_new_i64();
11074 TCGv_i64 fp2
= tcg_temp_new_i64();
11075 gen_load_fpr64(ctx
, fp0
, fs
);
11076 gen_load_fpr64(ctx
, fp1
, ft
);
11077 gen_load_fpr64(ctx
, fp2
, fd
);
11078 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11079 gen_store_fpr64(ctx
, fp2
, fd
);
11080 tcg_temp_free_i64(fp2
);
11081 tcg_temp_free_i64(fp1
);
11082 tcg_temp_free_i64(fp0
);
11086 check_insn(ctx
, ISA_MIPS32R6
);
11088 TCGv_i64 fp0
= tcg_temp_new_i64();
11089 TCGv_i64 fp1
= tcg_temp_new_i64();
11090 TCGv_i64 fp2
= tcg_temp_new_i64();
11091 gen_load_fpr64(ctx
, fp0
, fs
);
11092 gen_load_fpr64(ctx
, fp1
, ft
);
11093 gen_load_fpr64(ctx
, fp2
, fd
);
11094 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11095 gen_store_fpr64(ctx
, fp2
, fd
);
11096 tcg_temp_free_i64(fp2
);
11097 tcg_temp_free_i64(fp1
);
11098 tcg_temp_free_i64(fp0
);
11102 check_insn(ctx
, ISA_MIPS32R6
);
11104 TCGv_i64 fp0
= tcg_temp_new_i64();
11105 gen_load_fpr64(ctx
, fp0
, fs
);
11106 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11107 gen_store_fpr64(ctx
, fp0
, fd
);
11108 tcg_temp_free_i64(fp0
);
11112 check_insn(ctx
, ISA_MIPS32R6
);
11114 TCGv_i64 fp0
= tcg_temp_new_i64();
11115 gen_load_fpr64(ctx
, fp0
, fs
);
11116 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11117 gen_store_fpr64(ctx
, fp0
, fd
);
11118 tcg_temp_free_i64(fp0
);
11121 case OPC_MIN_D
: /* OPC_RECIP2_D */
11122 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11124 TCGv_i64 fp0
= tcg_temp_new_i64();
11125 TCGv_i64 fp1
= tcg_temp_new_i64();
11126 gen_load_fpr64(ctx
, fp0
, fs
);
11127 gen_load_fpr64(ctx
, fp1
, ft
);
11128 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11129 gen_store_fpr64(ctx
, fp1
, fd
);
11130 tcg_temp_free_i64(fp1
);
11131 tcg_temp_free_i64(fp0
);
11134 check_cp1_64bitmode(ctx
);
11136 TCGv_i64 fp0
= tcg_temp_new_i64();
11137 TCGv_i64 fp1
= tcg_temp_new_i64();
11139 gen_load_fpr64(ctx
, fp0
, fs
);
11140 gen_load_fpr64(ctx
, fp1
, ft
);
11141 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11142 tcg_temp_free_i64(fp1
);
11143 gen_store_fpr64(ctx
, fp0
, fd
);
11144 tcg_temp_free_i64(fp0
);
11148 case OPC_MINA_D
: /* OPC_RECIP1_D */
11149 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11151 TCGv_i64 fp0
= tcg_temp_new_i64();
11152 TCGv_i64 fp1
= tcg_temp_new_i64();
11153 gen_load_fpr64(ctx
, fp0
, fs
);
11154 gen_load_fpr64(ctx
, fp1
, ft
);
11155 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11156 gen_store_fpr64(ctx
, fp1
, fd
);
11157 tcg_temp_free_i64(fp1
);
11158 tcg_temp_free_i64(fp0
);
11161 check_cp1_64bitmode(ctx
);
11163 TCGv_i64 fp0
= tcg_temp_new_i64();
11165 gen_load_fpr64(ctx
, fp0
, fs
);
11166 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11167 gen_store_fpr64(ctx
, fp0
, fd
);
11168 tcg_temp_free_i64(fp0
);
11172 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11173 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11175 TCGv_i64 fp0
= tcg_temp_new_i64();
11176 TCGv_i64 fp1
= tcg_temp_new_i64();
11177 gen_load_fpr64(ctx
, fp0
, fs
);
11178 gen_load_fpr64(ctx
, fp1
, ft
);
11179 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11180 gen_store_fpr64(ctx
, fp1
, fd
);
11181 tcg_temp_free_i64(fp1
);
11182 tcg_temp_free_i64(fp0
);
11185 check_cp1_64bitmode(ctx
);
11187 TCGv_i64 fp0
= tcg_temp_new_i64();
11189 gen_load_fpr64(ctx
, fp0
, fs
);
11190 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11191 gen_store_fpr64(ctx
, fp0
, fd
);
11192 tcg_temp_free_i64(fp0
);
11196 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11197 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11199 TCGv_i64 fp0
= tcg_temp_new_i64();
11200 TCGv_i64 fp1
= tcg_temp_new_i64();
11201 gen_load_fpr64(ctx
, fp0
, fs
);
11202 gen_load_fpr64(ctx
, fp1
, ft
);
11203 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11204 gen_store_fpr64(ctx
, fp1
, fd
);
11205 tcg_temp_free_i64(fp1
);
11206 tcg_temp_free_i64(fp0
);
11209 check_cp1_64bitmode(ctx
);
11211 TCGv_i64 fp0
= tcg_temp_new_i64();
11212 TCGv_i64 fp1
= tcg_temp_new_i64();
11214 gen_load_fpr64(ctx
, fp0
, fs
);
11215 gen_load_fpr64(ctx
, fp1
, ft
);
11216 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11217 tcg_temp_free_i64(fp1
);
11218 gen_store_fpr64(ctx
, fp0
, fd
);
11219 tcg_temp_free_i64(fp0
);
11226 case OPC_CMP_UEQ_D
:
11227 case OPC_CMP_OLT_D
:
11228 case OPC_CMP_ULT_D
:
11229 case OPC_CMP_OLE_D
:
11230 case OPC_CMP_ULE_D
:
11232 case OPC_CMP_NGLE_D
:
11233 case OPC_CMP_SEQ_D
:
11234 case OPC_CMP_NGL_D
:
11236 case OPC_CMP_NGE_D
:
11238 case OPC_CMP_NGT_D
:
11239 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11240 if (ctx
->opcode
& (1 << 6)) {
11241 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11243 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11247 check_cp1_registers(ctx
, fs
);
11249 TCGv_i32 fp32
= tcg_temp_new_i32();
11250 TCGv_i64 fp64
= tcg_temp_new_i64();
11252 gen_load_fpr64(ctx
, fp64
, fs
);
11253 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11254 tcg_temp_free_i64(fp64
);
11255 gen_store_fpr32(ctx
, fp32
, fd
);
11256 tcg_temp_free_i32(fp32
);
11260 check_cp1_registers(ctx
, fs
);
11262 TCGv_i32 fp32
= tcg_temp_new_i32();
11263 TCGv_i64 fp64
= tcg_temp_new_i64();
11265 gen_load_fpr64(ctx
, fp64
, fs
);
11266 if (ctx
->nan2008
) {
11267 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11269 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11271 tcg_temp_free_i64(fp64
);
11272 gen_store_fpr32(ctx
, fp32
, fd
);
11273 tcg_temp_free_i32(fp32
);
11277 check_cp1_64bitmode(ctx
);
11279 TCGv_i64 fp0
= tcg_temp_new_i64();
11281 gen_load_fpr64(ctx
, fp0
, fs
);
11282 if (ctx
->nan2008
) {
11283 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11285 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11287 gen_store_fpr64(ctx
, fp0
, fd
);
11288 tcg_temp_free_i64(fp0
);
11293 TCGv_i32 fp0
= tcg_temp_new_i32();
11295 gen_load_fpr32(ctx
, fp0
, fs
);
11296 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11297 gen_store_fpr32(ctx
, fp0
, fd
);
11298 tcg_temp_free_i32(fp0
);
11302 check_cp1_registers(ctx
, fd
);
11304 TCGv_i32 fp32
= tcg_temp_new_i32();
11305 TCGv_i64 fp64
= tcg_temp_new_i64();
11307 gen_load_fpr32(ctx
, fp32
, fs
);
11308 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11309 tcg_temp_free_i32(fp32
);
11310 gen_store_fpr64(ctx
, fp64
, fd
);
11311 tcg_temp_free_i64(fp64
);
11315 check_cp1_64bitmode(ctx
);
11317 TCGv_i32 fp32
= tcg_temp_new_i32();
11318 TCGv_i64 fp64
= tcg_temp_new_i64();
11320 gen_load_fpr64(ctx
, fp64
, fs
);
11321 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11322 tcg_temp_free_i64(fp64
);
11323 gen_store_fpr32(ctx
, fp32
, fd
);
11324 tcg_temp_free_i32(fp32
);
11328 check_cp1_64bitmode(ctx
);
11330 TCGv_i64 fp0
= tcg_temp_new_i64();
11332 gen_load_fpr64(ctx
, fp0
, fs
);
11333 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11334 gen_store_fpr64(ctx
, fp0
, fd
);
11335 tcg_temp_free_i64(fp0
);
11338 case OPC_CVT_PS_PW
:
11341 TCGv_i64 fp0
= tcg_temp_new_i64();
11343 gen_load_fpr64(ctx
, fp0
, fs
);
11344 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11345 gen_store_fpr64(ctx
, fp0
, fd
);
11346 tcg_temp_free_i64(fp0
);
11352 TCGv_i64 fp0
= tcg_temp_new_i64();
11353 TCGv_i64 fp1
= tcg_temp_new_i64();
11355 gen_load_fpr64(ctx
, fp0
, fs
);
11356 gen_load_fpr64(ctx
, fp1
, ft
);
11357 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11358 tcg_temp_free_i64(fp1
);
11359 gen_store_fpr64(ctx
, fp0
, fd
);
11360 tcg_temp_free_i64(fp0
);
11366 TCGv_i64 fp0
= tcg_temp_new_i64();
11367 TCGv_i64 fp1
= tcg_temp_new_i64();
11369 gen_load_fpr64(ctx
, fp0
, fs
);
11370 gen_load_fpr64(ctx
, fp1
, ft
);
11371 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11372 tcg_temp_free_i64(fp1
);
11373 gen_store_fpr64(ctx
, fp0
, fd
);
11374 tcg_temp_free_i64(fp0
);
11380 TCGv_i64 fp0
= tcg_temp_new_i64();
11381 TCGv_i64 fp1
= tcg_temp_new_i64();
11383 gen_load_fpr64(ctx
, fp0
, fs
);
11384 gen_load_fpr64(ctx
, fp1
, ft
);
11385 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11386 tcg_temp_free_i64(fp1
);
11387 gen_store_fpr64(ctx
, fp0
, fd
);
11388 tcg_temp_free_i64(fp0
);
11394 TCGv_i64 fp0
= tcg_temp_new_i64();
11396 gen_load_fpr64(ctx
, fp0
, fs
);
11397 gen_helper_float_abs_ps(fp0
, fp0
);
11398 gen_store_fpr64(ctx
, fp0
, fd
);
11399 tcg_temp_free_i64(fp0
);
11405 TCGv_i64 fp0
= tcg_temp_new_i64();
11407 gen_load_fpr64(ctx
, fp0
, fs
);
11408 gen_store_fpr64(ctx
, fp0
, fd
);
11409 tcg_temp_free_i64(fp0
);
11415 TCGv_i64 fp0
= tcg_temp_new_i64();
11417 gen_load_fpr64(ctx
, fp0
, fs
);
11418 gen_helper_float_chs_ps(fp0
, fp0
);
11419 gen_store_fpr64(ctx
, fp0
, fd
);
11420 tcg_temp_free_i64(fp0
);
11425 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11430 TCGLabel
*l1
= gen_new_label();
11434 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11435 fp0
= tcg_temp_new_i64();
11436 gen_load_fpr64(ctx
, fp0
, fs
);
11437 gen_store_fpr64(ctx
, fp0
, fd
);
11438 tcg_temp_free_i64(fp0
);
11445 TCGLabel
*l1
= gen_new_label();
11449 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11450 fp0
= tcg_temp_new_i64();
11451 gen_load_fpr64(ctx
, fp0
, fs
);
11452 gen_store_fpr64(ctx
, fp0
, fd
);
11453 tcg_temp_free_i64(fp0
);
11461 TCGv_i64 fp0
= tcg_temp_new_i64();
11462 TCGv_i64 fp1
= tcg_temp_new_i64();
11464 gen_load_fpr64(ctx
, fp0
, ft
);
11465 gen_load_fpr64(ctx
, fp1
, fs
);
11466 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11467 tcg_temp_free_i64(fp1
);
11468 gen_store_fpr64(ctx
, fp0
, fd
);
11469 tcg_temp_free_i64(fp0
);
11475 TCGv_i64 fp0
= tcg_temp_new_i64();
11476 TCGv_i64 fp1
= tcg_temp_new_i64();
11478 gen_load_fpr64(ctx
, fp0
, ft
);
11479 gen_load_fpr64(ctx
, fp1
, fs
);
11480 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11481 tcg_temp_free_i64(fp1
);
11482 gen_store_fpr64(ctx
, fp0
, fd
);
11483 tcg_temp_free_i64(fp0
);
11486 case OPC_RECIP2_PS
:
11489 TCGv_i64 fp0
= tcg_temp_new_i64();
11490 TCGv_i64 fp1
= tcg_temp_new_i64();
11492 gen_load_fpr64(ctx
, fp0
, fs
);
11493 gen_load_fpr64(ctx
, fp1
, ft
);
11494 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11495 tcg_temp_free_i64(fp1
);
11496 gen_store_fpr64(ctx
, fp0
, fd
);
11497 tcg_temp_free_i64(fp0
);
11500 case OPC_RECIP1_PS
:
11503 TCGv_i64 fp0
= tcg_temp_new_i64();
11505 gen_load_fpr64(ctx
, fp0
, fs
);
11506 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11507 gen_store_fpr64(ctx
, fp0
, fd
);
11508 tcg_temp_free_i64(fp0
);
11511 case OPC_RSQRT1_PS
:
11514 TCGv_i64 fp0
= tcg_temp_new_i64();
11516 gen_load_fpr64(ctx
, fp0
, fs
);
11517 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11518 gen_store_fpr64(ctx
, fp0
, fd
);
11519 tcg_temp_free_i64(fp0
);
11522 case OPC_RSQRT2_PS
:
11525 TCGv_i64 fp0
= tcg_temp_new_i64();
11526 TCGv_i64 fp1
= tcg_temp_new_i64();
11528 gen_load_fpr64(ctx
, fp0
, fs
);
11529 gen_load_fpr64(ctx
, fp1
, ft
);
11530 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11531 tcg_temp_free_i64(fp1
);
11532 gen_store_fpr64(ctx
, fp0
, fd
);
11533 tcg_temp_free_i64(fp0
);
11537 check_cp1_64bitmode(ctx
);
11539 TCGv_i32 fp0
= tcg_temp_new_i32();
11541 gen_load_fpr32h(ctx
, fp0
, fs
);
11542 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11543 gen_store_fpr32(ctx
, fp0
, fd
);
11544 tcg_temp_free_i32(fp0
);
11547 case OPC_CVT_PW_PS
:
11550 TCGv_i64 fp0
= tcg_temp_new_i64();
11552 gen_load_fpr64(ctx
, fp0
, fs
);
11553 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11554 gen_store_fpr64(ctx
, fp0
, fd
);
11555 tcg_temp_free_i64(fp0
);
11559 check_cp1_64bitmode(ctx
);
11561 TCGv_i32 fp0
= tcg_temp_new_i32();
11563 gen_load_fpr32(ctx
, fp0
, fs
);
11564 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11565 gen_store_fpr32(ctx
, fp0
, fd
);
11566 tcg_temp_free_i32(fp0
);
11572 TCGv_i32 fp0
= tcg_temp_new_i32();
11573 TCGv_i32 fp1
= tcg_temp_new_i32();
11575 gen_load_fpr32(ctx
, fp0
, fs
);
11576 gen_load_fpr32(ctx
, fp1
, ft
);
11577 gen_store_fpr32h(ctx
, fp0
, fd
);
11578 gen_store_fpr32(ctx
, fp1
, fd
);
11579 tcg_temp_free_i32(fp0
);
11580 tcg_temp_free_i32(fp1
);
11586 TCGv_i32 fp0
= tcg_temp_new_i32();
11587 TCGv_i32 fp1
= tcg_temp_new_i32();
11589 gen_load_fpr32(ctx
, fp0
, fs
);
11590 gen_load_fpr32h(ctx
, fp1
, ft
);
11591 gen_store_fpr32(ctx
, fp1
, fd
);
11592 gen_store_fpr32h(ctx
, fp0
, fd
);
11593 tcg_temp_free_i32(fp0
);
11594 tcg_temp_free_i32(fp1
);
11600 TCGv_i32 fp0
= tcg_temp_new_i32();
11601 TCGv_i32 fp1
= tcg_temp_new_i32();
11603 gen_load_fpr32h(ctx
, fp0
, fs
);
11604 gen_load_fpr32(ctx
, fp1
, ft
);
11605 gen_store_fpr32(ctx
, fp1
, fd
);
11606 gen_store_fpr32h(ctx
, fp0
, fd
);
11607 tcg_temp_free_i32(fp0
);
11608 tcg_temp_free_i32(fp1
);
11614 TCGv_i32 fp0
= tcg_temp_new_i32();
11615 TCGv_i32 fp1
= tcg_temp_new_i32();
11617 gen_load_fpr32h(ctx
, fp0
, fs
);
11618 gen_load_fpr32h(ctx
, fp1
, ft
);
11619 gen_store_fpr32(ctx
, fp1
, fd
);
11620 gen_store_fpr32h(ctx
, fp0
, fd
);
11621 tcg_temp_free_i32(fp0
);
11622 tcg_temp_free_i32(fp1
);
11626 case OPC_CMP_UN_PS
:
11627 case OPC_CMP_EQ_PS
:
11628 case OPC_CMP_UEQ_PS
:
11629 case OPC_CMP_OLT_PS
:
11630 case OPC_CMP_ULT_PS
:
11631 case OPC_CMP_OLE_PS
:
11632 case OPC_CMP_ULE_PS
:
11633 case OPC_CMP_SF_PS
:
11634 case OPC_CMP_NGLE_PS
:
11635 case OPC_CMP_SEQ_PS
:
11636 case OPC_CMP_NGL_PS
:
11637 case OPC_CMP_LT_PS
:
11638 case OPC_CMP_NGE_PS
:
11639 case OPC_CMP_LE_PS
:
11640 case OPC_CMP_NGT_PS
:
11641 if (ctx
->opcode
& (1 << 6)) {
11642 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
11644 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
11648 MIPS_INVAL("farith");
11649 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */

/*
 * Generate an FPU indexed load/store (LWXC1/LDXC1/LUXC1/SWXC1/SDXC1/SUXC1).
 * The effective address is cpu_gpr[base] + cpu_gpr[index]; when either
 * register is $zero the addition is skipped and the other register is used
 * directly.  LUXC1/SUXC1 force 8-byte alignment by masking the low bits.
 *
 * NOTE(review): this excerpt was mechanically elided.  The enclosing braces,
 * the "switch (opc)" scaffolding (case labels, breaks) and the final
 * tcg_temp_free(t0) are missing and are marked with "elided:" comments —
 * restore them from the upstream file before compiling.
 */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    /* elided: { */
    TCGv t0 = tcg_temp_new();
    /* elided: if (base == 0) { */
        gen_load_gpr(t0, index);                  /* base is $zero: addr = index */
    } else if (index == 0) {
        gen_load_gpr(t0, base);                   /* index is $zero: addr = base */
    /* elided: } else { */
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* elided: } */
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    /* elided: switch (opc) { */
    /* [OPC_LWXC1 — case label elided] 32-bit FP load */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
    /* [OPC_LDXC1 — case label elided] 64-bit FP load; needs even/odd reg check */
        check_cp1_registers(ctx, fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
    /* [OPC_LUXC1 — case label elided] unaligned 64-bit FP load (addr forced
       down to an 8-byte boundary) */
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
    /* [OPC_SWXC1 — case label elided] 32-bit FP store */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
    /* [OPC_SDXC1 — case label elided] 64-bit FP store */
        check_cp1_registers(ctx, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
    /* [OPC_SUXC1 — case label elided] unaligned 64-bit FP store */
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
    /* elided: } tcg_temp_free(t0); } */
/*
 * Generate a three-operand FPU (COP1X) arithmetic op: ALNV.PS plus the
 * fused multiply-add family MADD/MSUB/NMADD/NMSUB in .S, .D and .PS
 * formats.  Result register is fd; sources are fs * ft combined with fr.
 *
 * NOTE(review): this excerpt was mechanically elided.  The "switch (opc)"
 * scaffolding (case labels, braces, breaks), check_ps() calls on the PS
 * cases, a tcg_gen_br/gen_set_label pair and the #else/#endif of the
 * endianness conditional are missing; they are marked with "elided:"
 * comments and must be restored from the upstream file.
 */
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    /* elided: { switch (opc) { case OPC_ALNV_PS: check_ps(ctx); { */
    /* [OPC_ALNV_PS] align-variable: select/merge the paired singles of
       fs/ft depending on (gpr[fr] & 7) being 0 or 4 */
        TCGv t0 = tcg_temp_local_new();
        TCGv_i32 fp = tcg_temp_new_i32();
        TCGv_i32 fph = tcg_temp_new_i32();
        TCGLabel *l1 = gen_new_label();
        TCGLabel *l2 = gen_new_label();

        gen_load_gpr(t0, fr);
        tcg_gen_andi_tl(t0, t0, 0x7);

        tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
        /* aligned case: plain 64-bit move fs -> fd */
        gen_load_fpr32(ctx, fp, fs);
        gen_load_fpr32h(ctx, fph, fs);
        gen_store_fpr32(ctx, fp, fd);
        gen_store_fpr32h(ctx, fph, fd);
        /* elided: tcg_gen_br(l2); gen_set_label(l1); */
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
        /* elided: tcg_temp_free(t0); */
#ifdef TARGET_WORDS_BIGENDIAN
        gen_load_fpr32(ctx, fp, fs);
        gen_load_fpr32h(ctx, fph, ft);
        gen_store_fpr32h(ctx, fp, fd);
        gen_store_fpr32(ctx, fph, fd);
        /* elided: #else (little-endian half-swap variant follows) */
        gen_load_fpr32h(ctx, fph, fs);
        gen_load_fpr32(ctx, fp, ft);
        gen_store_fpr32(ctx, fph, fd);
        gen_store_fpr32h(ctx, fp, fd);
        /* elided: #endif  gen_set_label(l2); */
        tcg_temp_free_i32(fp);
        tcg_temp_free_i32(fph);
    /* [OPC_MADD_S — case label elided] fd = fs * ft + fr */
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
    /* [OPC_MADD_D — case label elided] */
        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_MADD_PS — case label and check_ps() elided] */
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_MSUB_S — case label elided] fd = fs * ft - fr */
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
    /* [OPC_MSUB_D — case label elided] */
        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_MSUB_PS — case label and check_ps() elided] */
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_NMADD_S — case label elided] fd = -(fs * ft + fr) */
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
    /* [OPC_NMADD_D — case label elided] */
        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_NMADD_PS — case label and check_ps() elided] */
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_NMSUB_S — case label elided] fd = -(fs * ft - fr) */
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
    /* [OPC_NMSUB_D — case label elided] */
        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [OPC_NMSUB_PS — case label and check_ps() elided] */
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
    /* [default — label elided] reserved-instruction trap */
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
    /* elided: closing braces of switch and function */
/*
 * Generate RDHWR (read hardware register): expose selected hardware state
 * (CPU number, SYNCI step, cycle counter, counter resolution, performance
 * counters, XNP, UserLocal) to user mode via GPR rt.  rd selects the
 * hardware register, sel the sub-register (R6 performance counters).
 *
 * NOTE(review): this excerpt was mechanically elided.  The "switch (rd)"
 * scaffolding, the local "TCGv t0;" declaration, gen_io_start/gen_io_end,
 * several #endif/#else lines and the trailing tcg_temp_free(t0) are
 * missing and are marked with "elided:" comments.
 */
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
    /* elided: { TCGv t0; */
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
    /* elided: #endif */
    t0 = tcg_temp_new();

    /* elided: switch (rd) { case 0: — CPUNum */
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
    /* [case 1 — SYNCI_Step, label elided] */
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
    /* [case 2 — CC cycle counter, label elided] */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* elided: gen_io_start(); */
        }
        gen_helper_rdhwr_cc(t0, cpu_env);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* elided: gen_io_end(); */
        }
        gen_store_gpr(t0, rt);
        /* Break the TB to be able to take timer interrupts immediately
           after reading count. DISAS_STOP isn't sufficient, we need to ensure
           we break completely out of translated code. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
    /* [case 3 — CCRes, label elided] */
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
    /* [case 4 — performance counters (R6 only), label elided] */
        check_insn(ctx, ISA_MIPS32R6);
        /* elided: if (sel != 0) { */
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        /* elided: } */
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
    /* [case 5 — XNP (R6 only), label elided] */
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
    /* [case 29 — UserLocal, label elided] */
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        /* elided: #else — system mode requires CP0 access or HWRENA.ULR */
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        /* elided: } else { */
            generate_exception_end(ctx, EXCP_RI);
        /* elided: } #endif break; */
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
    /* elided: break; } tcg_temp_free(t0); } */
/*
 * Clear the branch-state bits of hflags once a branch and its delay /
 * forbidden slot have been fully translated.  If translation continues
 * normally the cleared state is persisted via save_cpu_state(); otherwise
 * only the runtime hflags TCG variable is masked, because the slot
 * instruction may itself modify hflags at execution time.
 *
 * NOTE(review): the opening brace, "} else {" and closing braces were
 * elided from this excerpt — marked below.
 */
static inline void clear_branch_hflags(DisasContext *ctx)
    /* elided: { */
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->base.is_jmp == DISAS_NEXT) {
        save_cpu_state(ctx, 0);
    /* elided: } else { */
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    /* elided: } } */
/*
 * Emit the code that completes a pending branch once the delay (or
 * forbidden) slot of insn_bytes length has been translated: dispatch on
 * the branch kind recorded in hflags and jump to the fall-through or the
 * recorded btarget.  Ends the TB (DISAS_NORETURN).
 *
 * NOTE(review): this excerpt was mechanically elided — several case
 * labels, breaks, gen_set_label(), tcg_temp_free(t0) and abort() are
 * missing; marked with "elided:" comments below.
 */
static void gen_branch(DisasContext *ctx, int insn_bytes)
    /* elided: { */
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            /* forbidden slot: fall through to the next instruction */
            gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
            /* elided: break; case MIPS_HFLAG_B: */
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                /* toggle ISA mode (MIPS16/microMIPS exchange branch) */
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* elided: break; */
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* elided: break; */
        case MIPS_HFLAG_BC:
            /* Conditional branch */
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
                /* elided: gen_set_label(l1); */
                gen_goto_tb(ctx, 0, ctx->btarget);
            /* elided: } break; */
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                /* bit 0 of the target selects the ISA mode */
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                /* elided: tcg_temp_free(t0); */
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            /* elided: } else { */
                tcg_gen_mov_tl(cpu_PC, btarget);
            /* elided: } */
            if (ctx->base.singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_lookup_and_goto_ptr();
            /* elided: break; default: */
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            /* elided: abort(); } } } */
/* Compact Branches */
/*
 * Translate an R6 compact branch/jump (no delay slot).  First computes the
 * branch target (btarget) and, for conditional forms, the comparison
 * operands; then either emits the unconditional branch immediately or
 * emits an inverted-condition skip over gen_goto_tb and marks the next
 * instruction as a forbidden slot.
 *
 * NOTE(review): this excerpt was mechanically elided.  Both "switch (opc)"
 * bodies lost many case labels, breaks, braces, "bcond_compute = 1;"
 * assignments, gen_set_label(fs), the "out:" label and the temp frees —
 * all marked with "elided:" comments and to be restored from upstream.
 */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
    /* elided: { */
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
        /* elided: #endif */
        generate_exception_end(ctx, EXCP_RI);
        /* elided: goto out; } */

    /* Load needed operands and calculate btarget */
    /* elided: switch (opc) { */
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        /* elided: bcond_compute = 1; */
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        /* elided: } break; */
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        /* elided: bcond_compute = 1; */
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* elided: break; */
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        /* elided: } */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        /* elided: bcond_compute = 1; */
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* elided: break; case OPC_BC: case OPC_BALC: */
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* elided: break; case OPC_BEQZC: case OPC_BNEZC: if (rs != 0) { */
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            /* elided: bcond_compute = 1; */
            ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* elided: } else { */
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        /* elided: } break; default: */
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        /* elided: goto out; } */

    if (bcond_compute == 0) {
        /* Uncoditional compact branch */
        /* elided: switch (opc) { case OPC_JIALC: */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
            /* elided: fall through — case OPC_JIC: */
            ctx->hflags |= MIPS_HFLAG_BR;
            /* elided: break; case OPC_BALC: */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
            /* elided: fall through — case OPC_BC: */
            ctx->hflags |= MIPS_HFLAG_B;
            /* elided: break; default: */
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            /* elided: goto out; } */

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    /* elided: } else { */
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        /* elided: switch (opc) { */
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                /* inverted condition skips the taken path */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            /* elided: } else { — OPC_BGEUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            /* elided: } break; */
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            /* elided: } else { — OPC_BLTUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            /* elided: } break; */
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            /* elided: } else { — OPC_BGEC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            /* elided: } break; */
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            /* elided: } else { — OPC_BLTC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            /* elided: } break; */
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            /* elided: if (rs >= rt) { */
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                /* flag operands whose value does not fit in 32 bits */
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                /* classic signed-add overflow test on the 32-bit sum */
                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    /* OPC_BOVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                /* elided: } else { — OPC_BNVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                /* elided: } */
                tcg_temp_free(input_overflow);
                /* elided: tcg_temp_free(t4); tcg_temp_free(t3); tcg_temp_free(t2); */
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    /* OPC_BEQZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                /* elided: } else { — OPC_BNEZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                /* elided: } } else { */
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    /* OPC_BEQC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                /* elided: } else { — OPC_BNEC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                /* elided: } } break; case OPC_BEQZC: */
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            /* elided: break; case OPC_BNEZC: */
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            /* elided: break; default: */
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            /* elided: goto out; } */

        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        /* elided: gen_set_label(fs); */

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
    /* elided: } out: tcg_temp_free(t0); tcg_temp_free(t1); } */
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
/*
 * NOTE(review): the "enum name_t {" openers, several enumerators and the
 * closing "};" lines of these enums were elided from this excerpt; only
 * the surviving enumerators are reproduced below.
 */
    M16_OPC_ADDIUSP = 0x00,    /* ADDIU rx, sp, imm */
    M16_OPC_ADDIUPC = 0x01,    /* ADDIU rx, pc, imm */
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */
/* RR funct field */
/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,    /* zero-extend byte */
    RR_RY_CNVT_ZEH = 0x1,    /* zero-extend halfword */
    RR_RY_CNVT_ZEW = 0x2,    /* zero-extend word (MIPS64) */
    RR_RY_CNVT_SEB = 0x4,    /* sign-extend byte */
    RR_RY_CNVT_SEH = 0x5,    /* sign-extend halfword */
    RR_RY_CNVT_SEW = 0x6,    /* sign-extend word (MIPS64) */
/*
 * Map a 3-bit MIPS16 register field to the MIPS32 GPR number it encodes:
 * fields 0 and 1 select $16/$17 (s0/s1), fields 2..7 select $2..$7
 * (v0, v1, a0..a3).
 *
 * @param r  MIPS16 register field, must be in [0, 7]
 * @return   corresponding MIPS32 GPR index
 *
 * Improvements: the lookup table is now const (it is never written, so it
 * belongs in read-only storage) and explicitly sized; the return statement
 * elided from this excerpt is restored.
 */
static int xlat(int r)
{
    static const int map[8] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/*
 * Generate the MIPS16e SAVE instruction: store outgoing argument registers
 * above the stack pointer, push the selected callee-saved registers
 * (ra, s0/s1, extended s-regs) below it, then decrement $sp by framesize.
 *
 * NOTE(review): this excerpt was mechanically elided.  The "int framesize"
 * parameter line, the aregs/xsregs switch scaffolding (case labels, the
 * guards around each argument store and each DECR_AND_STORE), the macro's
 * closing "} while (0)" and the final temp frees are missing — marked with
 * "elided:" comments below.
 */
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
                             /* elided: int framesize)  { */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* elided: switch (aregs) dispatch computing args/astatic; invalid
       encodings fall into the exception below */
        generate_exception_end(ctx, EXCP_RI);
        /* elided: return; } */

    /* store the outgoing arguments a0..a3 at sp+0..sp+12 (guards on the
       computed arg count elided) */
        gen_base_offset_addr(ctx, t0, 29, 12);
        gen_load_gpr(t1, 7);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 8);
        gen_load_gpr(t1, 6);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 4);
        gen_load_gpr(t1, 5);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        gen_base_offset_addr(ctx, t0, 29, 0);
        gen_load_gpr(t1, 4);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);

/* push one register: pre-decrement t0 by 4 and store */
#define DECR_AND_STORE(reg) do { \
    tcg_gen_movi_tl(t2, -4); \
    gen_op_addr_add(ctx, t0, t0, t2); \
    gen_load_gpr(t1, reg); \
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
/* elided: } while (0) */

    /* conditional guards (do_ra / xsregs fallthrough cases / do_s0, do_s1,
       astatic) around the pushes below were elided */
    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);
    /* elided: astatic switch; invalid encodings raise RI */
        generate_exception_end(ctx, EXCP_RI);
        /* elided: return; } */
#undef DECR_AND_STORE

    /* finally drop the stack pointer by the full frame size */
    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
    /* elided: tcg_temp_free(t0); tcg_temp_free(t1); tcg_temp_free(t2); } */
/*
 * Generate the MIPS16e RESTORE instruction: reload the selected
 * callee-saved registers from the top of the frame and then add framesize
 * back to $sp.  Mirror image of gen_mips16_save.
 *
 * NOTE(review): this excerpt was mechanically elided.  The "int framesize"
 * parameter line, all DECR_AND_LOAD invocations with their xsregs/astatic
 * guards, and the macro's closing "} while (0)" are missing — marked with
 * "elided:" comments below.
 */
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
                                /* elided: int framesize)  { */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /* t0 = old stack top = sp + framesize */
    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

/* pop one register: pre-decrement t0 by 4, load and write back */
#define DECR_AND_LOAD(reg) do { \
    tcg_gen_movi_tl(t2, -4); \
    gen_op_addr_add(ctx, t0, t0, t2); \
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
    gen_store_gpr(t1, reg); \
/* elided: } while (0) */

    /* elided: guarded DECR_AND_LOAD sequence (ra, xsregs, s0/s1, astatic);
       invalid astatic encodings raise RI */
        generate_exception_end(ctx, EXCP_RI);
        /* elided: return; } */
#undef DECR_AND_LOAD

    /* release the frame */
    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
    /* elided: tcg_temp_free(t0); tcg_temp_free(t1); tcg_temp_free(t2); } */
/*
 * Generate MIPS16/microMIPS ADDIUPC: rx = pc_relative_pc(ctx) + imm,
 * sign-extended to 32 bits unless is_64_bit.  The extended form is not
 * permitted in a branch delay/forbidden slot.
 *
 * NOTE(review): this excerpt was mechanically elided — the "TCGv t0;"
 * declaration, the "if (!is_64_bit)" guard, the trailing temp free and
 * the closing braces are missing; marked below.
 */
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    /* elided: { TCGv t0; */

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        /* elided: return; } */

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    /* elided: if (!is_64_bit) { */
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    /* elided: } tcg_temp_free(t0); } */
/*
 * Generate the CACHE instruction: compute the effective address
 * base + offset and invoke the cache helper with the 5-bit operation code.
 *
 * NOTE(review): the "int16_t offset" parameter line, the temp frees and
 * the closing brace were elided from this excerpt — marked below.
 */
static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                /* elided: int16_t offset)  { */
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
    /* elided: tcg_temp_free(t1); tcg_temp_free_i32(t0); } */
#if defined(TARGET_MIPS64)
/*
 * Decode the MIPS16 I64 funct group (64-bit only): SP-relative LD/SD,
 * SD ra, stack adjust, PC-relative LD and DADDIU variants.  In the
 * non-extended encodings the immediate is scaled (<< 3 or << 2) before use.
 *
 * NOTE(review): this excerpt was mechanically elided — the "int extended"
 * parameter line and the "switch (funct)" scaffolding (case labels,
 * breaks, braces) are missing; each surviving case body is labelled with
 * a comment below.
 */
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
                               /* elided: int extended)  { switch (funct) { */
    /* [I64_LDSP — case label elided] LD ry, offset(sp) */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_ld(ctx, OPC_LD, ry, 29, offset);
    /* [I64_SDSP — case label elided] SD ry, offset(sp) */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_st(ctx, OPC_SD, ry, 29, offset);
    /* [I64_SDRASP — case label elided] SD ra, offset(sp) */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : (ctx->opcode & 0xff) << 3;
        gen_st(ctx, OPC_SD, 31, 29, offset);
    /* [I64_DADJSP — case label elided] DADDIU sp, sp, offset */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
        gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
    /* [I64_LDPC — case label elided] PC-relative LD; extended form is
       illegal in a delay/forbidden slot */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
            generate_exception_end(ctx, EXCP_RI);
        /* elided: } else { */
            offset = extended ? offset : offset << 3;
            gen_ld(ctx, OPC_LDPC, ry, 0, offset);
        /* elided: } break; */
    /* [I64_DADDIU5 — case label elided] DADDIU ry, ry, imm5 */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
        gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
    /* [I64_DADDIUPC — case label elided] DADDIU ry, pc, imm */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_addiupc(ctx, ry, offset, 1, extended);
    /* [I64_DADDIUSP — case label elided] DADDIU ry, sp, imm */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
    /* elided: break; } } */
12861 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
12863 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
12864 int op
, rx
, ry
, funct
, sa
;
12865 int16_t imm
, offset
;
12867 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
12868 op
= (ctx
->opcode
>> 11) & 0x1f;
12869 sa
= (ctx
->opcode
>> 22) & 0x1f;
12870 funct
= (ctx
->opcode
>> 8) & 0x7;
12871 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
12872 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
12873 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
12874 | ((ctx
->opcode
>> 21) & 0x3f) << 5
12875 | (ctx
->opcode
& 0x1f));
12877 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
12880 case M16_OPC_ADDIUSP
:
12881 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
12883 case M16_OPC_ADDIUPC
:
12884 gen_addiupc(ctx
, rx
, imm
, 0, 1);
12887 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
12888 /* No delay slot, so just process as a normal instruction */
12891 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
12892 /* No delay slot, so just process as a normal instruction */
12894 case M16_OPC_BNEQZ
:
12895 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
12896 /* No delay slot, so just process as a normal instruction */
12898 case M16_OPC_SHIFT
:
12899 switch (ctx
->opcode
& 0x3) {
12901 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
12904 #if defined(TARGET_MIPS64)
12905 check_mips_64(ctx
);
12906 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
12908 generate_exception_end(ctx
, EXCP_RI
);
12912 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
12915 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
12919 #if defined(TARGET_MIPS64)
12921 check_insn(ctx
, ISA_MIPS3
);
12922 check_mips_64(ctx
);
12923 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
12927 imm
= ctx
->opcode
& 0xf;
12928 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
12929 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
12930 imm
= (int16_t) (imm
<< 1) >> 1;
12931 if ((ctx
->opcode
>> 4) & 0x1) {
12932 #if defined(TARGET_MIPS64)
12933 check_mips_64(ctx
);
12934 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
12936 generate_exception_end(ctx
, EXCP_RI
);
12939 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
12942 case M16_OPC_ADDIU8
:
12943 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
12946 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
12948 case M16_OPC_SLTIU
:
12949 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
12954 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
12957 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
12960 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
12963 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
12966 check_insn(ctx
, ISA_MIPS32
);
12968 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
12969 int aregs
= (ctx
->opcode
>> 16) & 0xf;
12970 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
12971 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
12972 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
12973 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
12974 | (ctx
->opcode
& 0xf)) << 3;
12976 if (ctx
->opcode
& (1 << 7)) {
12977 gen_mips16_save(ctx
, xsregs
, aregs
,
12978 do_ra
, do_s0
, do_s1
,
12981 gen_mips16_restore(ctx
, xsregs
, aregs
,
12982 do_ra
, do_s0
, do_s1
,
12988 generate_exception_end(ctx
, EXCP_RI
);
12993 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
12996 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
12998 #if defined(TARGET_MIPS64)
13000 check_insn(ctx
, ISA_MIPS3
);
13001 check_mips_64(ctx
);
13002 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13006 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13009 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13012 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13015 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13018 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13021 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13024 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13026 #if defined(TARGET_MIPS64)
13028 check_insn(ctx
, ISA_MIPS3
);
13029 check_mips_64(ctx
);
13030 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13034 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13037 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13040 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13043 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13045 #if defined(TARGET_MIPS64)
13047 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13051 generate_exception_end(ctx
, EXCP_RI
);
13058 static inline bool is_uhi(int sdbbp_code
)
13060 #ifdef CONFIG_USER_ONLY
13063 return semihosting_enabled() && sdbbp_code
== 1;
13067 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13071 int op
, cnvt_op
, op1
, offset
;
13075 op
= (ctx
->opcode
>> 11) & 0x1f;
13076 sa
= (ctx
->opcode
>> 2) & 0x7;
13077 sa
= sa
== 0 ? 8 : sa
;
13078 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13079 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13080 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13081 op1
= offset
= ctx
->opcode
& 0x1f;
13086 case M16_OPC_ADDIUSP
:
13088 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13090 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13093 case M16_OPC_ADDIUPC
:
13094 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13097 offset
= (ctx
->opcode
& 0x7ff) << 1;
13098 offset
= (int16_t)(offset
<< 4) >> 4;
13099 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13100 /* No delay slot, so just process as a normal instruction */
13103 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13104 offset
= (((ctx
->opcode
& 0x1f) << 21)
13105 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13107 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13108 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13112 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13113 ((int8_t)ctx
->opcode
) << 1, 0);
13114 /* No delay slot, so just process as a normal instruction */
13116 case M16_OPC_BNEQZ
:
13117 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13118 ((int8_t)ctx
->opcode
) << 1, 0);
13119 /* No delay slot, so just process as a normal instruction */
13121 case M16_OPC_SHIFT
:
13122 switch (ctx
->opcode
& 0x3) {
13124 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13127 #if defined(TARGET_MIPS64)
13128 check_insn(ctx
, ISA_MIPS3
);
13129 check_mips_64(ctx
);
13130 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13132 generate_exception_end(ctx
, EXCP_RI
);
13136 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13139 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13143 #if defined(TARGET_MIPS64)
13145 check_insn(ctx
, ISA_MIPS3
);
13146 check_mips_64(ctx
);
13147 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13152 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13154 if ((ctx
->opcode
>> 4) & 1) {
13155 #if defined(TARGET_MIPS64)
13156 check_insn(ctx
, ISA_MIPS3
);
13157 check_mips_64(ctx
);
13158 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13160 generate_exception_end(ctx
, EXCP_RI
);
13163 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13167 case M16_OPC_ADDIU8
:
13169 int16_t imm
= (int8_t) ctx
->opcode
;
13171 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13176 int16_t imm
= (uint8_t) ctx
->opcode
;
13177 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13180 case M16_OPC_SLTIU
:
13182 int16_t imm
= (uint8_t) ctx
->opcode
;
13183 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13190 funct
= (ctx
->opcode
>> 8) & 0x7;
13193 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13194 ((int8_t)ctx
->opcode
) << 1, 0);
13197 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13198 ((int8_t)ctx
->opcode
) << 1, 0);
13201 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13204 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13205 ((int8_t)ctx
->opcode
) << 3);
13208 check_insn(ctx
, ISA_MIPS32
);
13210 int do_ra
= ctx
->opcode
& (1 << 6);
13211 int do_s0
= ctx
->opcode
& (1 << 5);
13212 int do_s1
= ctx
->opcode
& (1 << 4);
13213 int framesize
= ctx
->opcode
& 0xf;
13215 if (framesize
== 0) {
13218 framesize
= framesize
<< 3;
13221 if (ctx
->opcode
& (1 << 7)) {
13222 gen_mips16_save(ctx
, 0, 0,
13223 do_ra
, do_s0
, do_s1
, framesize
);
13225 gen_mips16_restore(ctx
, 0, 0,
13226 do_ra
, do_s0
, do_s1
, framesize
);
13232 int rz
= xlat(ctx
->opcode
& 0x7);
13234 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13235 ((ctx
->opcode
>> 5) & 0x7);
13236 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13240 reg32
= ctx
->opcode
& 0x1f;
13241 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13244 generate_exception_end(ctx
, EXCP_RI
);
13251 int16_t imm
= (uint8_t) ctx
->opcode
;
13253 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13258 int16_t imm
= (uint8_t) ctx
->opcode
;
13259 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13262 #if defined(TARGET_MIPS64)
13264 check_insn(ctx
, ISA_MIPS3
);
13265 check_mips_64(ctx
);
13266 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13270 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13273 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13276 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13279 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13282 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13285 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13288 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13290 #if defined (TARGET_MIPS64)
13292 check_insn(ctx
, ISA_MIPS3
);
13293 check_mips_64(ctx
);
13294 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13298 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13301 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13304 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13307 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13311 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13314 switch (ctx
->opcode
& 0x3) {
13316 mips32_op
= OPC_ADDU
;
13319 mips32_op
= OPC_SUBU
;
13321 #if defined(TARGET_MIPS64)
13323 mips32_op
= OPC_DADDU
;
13324 check_insn(ctx
, ISA_MIPS3
);
13325 check_mips_64(ctx
);
13328 mips32_op
= OPC_DSUBU
;
13329 check_insn(ctx
, ISA_MIPS3
);
13330 check_mips_64(ctx
);
13334 generate_exception_end(ctx
, EXCP_RI
);
13338 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13347 int nd
= (ctx
->opcode
>> 7) & 0x1;
13348 int link
= (ctx
->opcode
>> 6) & 0x1;
13349 int ra
= (ctx
->opcode
>> 5) & 0x1;
13352 check_insn(ctx
, ISA_MIPS32
);
13361 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13366 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13367 gen_helper_do_semihosting(cpu_env
);
13369 /* XXX: not clear which exception should be raised
13370 * when in debug mode...
13372 check_insn(ctx
, ISA_MIPS32
);
13373 generate_exception_end(ctx
, EXCP_DBp
);
13377 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
13380 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
13383 generate_exception_end(ctx
, EXCP_BREAK
);
13386 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
13389 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
13392 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
13394 #if defined (TARGET_MIPS64)
13396 check_insn(ctx
, ISA_MIPS3
);
13397 check_mips_64(ctx
);
13398 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
13402 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
13405 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
13408 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
13411 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
13414 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
13417 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
13420 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
13423 check_insn(ctx
, ISA_MIPS32
);
13425 case RR_RY_CNVT_ZEB
:
13426 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13428 case RR_RY_CNVT_ZEH
:
13429 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13431 case RR_RY_CNVT_SEB
:
13432 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13434 case RR_RY_CNVT_SEH
:
13435 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13437 #if defined (TARGET_MIPS64)
13438 case RR_RY_CNVT_ZEW
:
13439 check_insn(ctx
, ISA_MIPS64
);
13440 check_mips_64(ctx
);
13441 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13443 case RR_RY_CNVT_SEW
:
13444 check_insn(ctx
, ISA_MIPS64
);
13445 check_mips_64(ctx
);
13446 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13450 generate_exception_end(ctx
, EXCP_RI
);
13455 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
13457 #if defined (TARGET_MIPS64)
13459 check_insn(ctx
, ISA_MIPS3
);
13460 check_mips_64(ctx
);
13461 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
13464 check_insn(ctx
, ISA_MIPS3
);
13465 check_mips_64(ctx
);
13466 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
13469 check_insn(ctx
, ISA_MIPS3
);
13470 check_mips_64(ctx
);
13471 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
13474 check_insn(ctx
, ISA_MIPS3
);
13475 check_mips_64(ctx
);
13476 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
13480 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
13483 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
13486 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
13489 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
13491 #if defined (TARGET_MIPS64)
13493 check_insn(ctx
, ISA_MIPS3
);
13494 check_mips_64(ctx
);
13495 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
13498 check_insn(ctx
, ISA_MIPS3
);
13499 check_mips_64(ctx
);
13500 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
13503 check_insn(ctx
, ISA_MIPS3
);
13504 check_mips_64(ctx
);
13505 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
13508 check_insn(ctx
, ISA_MIPS3
);
13509 check_mips_64(ctx
);
13510 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
13514 generate_exception_end(ctx
, EXCP_RI
);
13518 case M16_OPC_EXTEND
:
13519 decode_extended_mips16_opc(env
, ctx
);
13522 #if defined(TARGET_MIPS64)
13524 funct
= (ctx
->opcode
>> 8) & 0x7;
13525 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
13529 generate_exception_end(ctx
, EXCP_RI
);
13536 /* microMIPS extension to MIPS32/MIPS64 */
13539 * microMIPS32/microMIPS64 major opcodes
13541 * 1. MIPS Architecture for Programmers Volume II-B:
13542 * The microMIPS32 Instruction Set (Revision 3.05)
13544 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
13546 * 2. MIPS Architecture For Programmers Volume II-A:
13547 * The MIPS64 Instruction Set (Revision 3.51)
13577 POOL32S
= 0x16, /* MIPS64 */
13578 DADDIU32
= 0x17, /* MIPS64 */
13607 /* 0x29 is reserved */
13620 /* 0x31 is reserved */
13633 SD32
= 0x36, /* MIPS64 */
13634 LD32
= 0x37, /* MIPS64 */
13636 /* 0x39 is reserved */
13652 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
13674 /* POOL32A encoding of minor opcode field */
13677 /* These opcodes are distinguished only by bits 9..6; those bits are
13678 * what are recorded below. */
13715 /* The following can be distinguished by their lower 6 bits. */
13725 /* POOL32AXF encoding of minor opcode field extension */
13728 * 1. MIPS Architecture for Programmers Volume II-B:
13729 * The microMIPS32 Instruction Set (Revision 3.05)
13731 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
13733 * 2. MIPS Architecture for Programmers VolumeIV-e:
13734 * The MIPS DSP Application-Specific Extension
13735 * to the microMIPS32 Architecture (Revision 2.34)
13737 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
13752 /* begin of microMIPS32 DSP */
13754 /* bits 13..12 for 0x01 */
13760 /* bits 13..12 for 0x2a */
13766 /* bits 13..12 for 0x32 */
13770 /* end of microMIPS32 DSP */
13772 /* bits 15..12 for 0x2c */
13789 /* bits 15..12 for 0x34 */
13797 /* bits 15..12 for 0x3c */
13799 JR
= 0x0, /* alias */
13807 /* bits 15..12 for 0x05 */
13811 /* bits 15..12 for 0x0d */
13823 /* bits 15..12 for 0x15 */
13829 /* bits 15..12 for 0x1d */
13833 /* bits 15..12 for 0x2d */
13838 /* bits 15..12 for 0x35 */
13845 /* POOL32B encoding of minor opcode field (bits 15..12) */
13861 /* POOL32C encoding of minor opcode field (bits 15..12) */
13882 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
13895 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
13908 /* POOL32F encoding of minor opcode field (bits 5..0) */
13911 /* These are the bit 7..6 values */
13920 /* These are the bit 8..6 values */
13945 MOVZ_FMT_05
= 0x05,
13979 CABS_COND_FMT
= 0x1c, /* MIPS3D */
13986 /* POOL32Fxf encoding of minor opcode extension field */
14024 /* POOL32I encoding of minor opcode field (bits 25..21) */
14054 /* These overlap and are distinguished by bit16 of the instruction */
14063 /* POOL16A encoding of minor opcode field */
14070 /* POOL16B encoding of minor opcode field */
14077 /* POOL16C encoding of minor opcode field */
14097 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14121 /* POOL16D encoding of minor opcode field */
14128 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit MIPS16/microMIPS register encoding to the full GPR number. */
static int mmreg (int r)
{
    static const int reg_map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
14142 /* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    /* Same as mmreg() except encoding 0 selects $zero. */
    static const int reg_map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
14150 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14151 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14152 #define uMIPS_RS2(op) uMIPS_RS(op)
14153 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14154 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14155 #define uMIPS_RS5(op) (op & 0x1f)
14157 /* Signed immediate */
14158 #define SIMM(op, start, width) \
14159 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14162 /* Zero-extended immediate */
14163 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14165 static void gen_addiur1sp(DisasContext
*ctx
)
14167 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14169 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14172 static void gen_addiur2(DisasContext
*ctx
)
14174 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14175 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14176 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14178 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14181 static void gen_addiusp(DisasContext
*ctx
)
14183 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14186 if (encoded
<= 1) {
14187 decoded
= 256 + encoded
;
14188 } else if (encoded
<= 255) {
14190 } else if (encoded
<= 509) {
14191 decoded
= encoded
- 512;
14193 decoded
= encoded
- 768;
14196 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14199 static void gen_addius5(DisasContext
*ctx
)
14201 int imm
= SIMM(ctx
->opcode
, 1, 4);
14202 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14204 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14207 static void gen_andi16(DisasContext
*ctx
)
14209 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14210 31, 32, 63, 64, 255, 32768, 65535 };
14211 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14212 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14213 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14215 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14218 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14219 int base
, int16_t offset
)
14224 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14225 generate_exception_end(ctx
, EXCP_RI
);
14229 t0
= tcg_temp_new();
14231 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14233 t1
= tcg_const_tl(reglist
);
14234 t2
= tcg_const_i32(ctx
->mem_idx
);
14236 save_cpu_state(ctx
, 1);
14239 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14242 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14244 #ifdef TARGET_MIPS64
14246 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14249 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14255 tcg_temp_free_i32(t2
);
14259 static void gen_pool16c_insn(DisasContext
*ctx
)
14261 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14262 int rs
= mmreg(ctx
->opcode
& 0x7);
14264 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14269 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14275 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14281 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14287 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14294 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14295 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14297 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14306 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14307 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14309 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14316 int reg
= ctx
->opcode
& 0x1f;
14318 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14324 int reg
= ctx
->opcode
& 0x1f;
14325 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14326 /* Let normal delay slot handling in our caller take us
14327 to the branch target. */
14332 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14333 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14337 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14338 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14342 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14346 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14349 generate_exception_end(ctx
, EXCP_BREAK
);
14352 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14353 gen_helper_do_semihosting(cpu_env
);
14355 /* XXX: not clear which exception should be raised
14356 * when in debug mode...
14358 check_insn(ctx
, ISA_MIPS32
);
14359 generate_exception_end(ctx
, EXCP_DBp
);
14362 case JRADDIUSP
+ 0:
14363 case JRADDIUSP
+ 1:
14365 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14366 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14367 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14368 /* Let normal delay slot handling in our caller take us
14369 to the branch target. */
14373 generate_exception_end(ctx
, EXCP_RI
);
14378 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
14381 int rd
, rs
, re
, rt
;
14382 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
14383 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
14384 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
14385 rd
= rd_enc
[enc_dest
];
14386 re
= re_enc
[enc_dest
];
14387 rs
= rs_rt_enc
[enc_rs
];
14388 rt
= rs_rt_enc
[enc_rt
];
14390 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
14392 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
14395 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
14397 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
14401 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
14403 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
14404 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
14406 switch (ctx
->opcode
& 0xf) {
14408 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
14411 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
14415 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14416 int offset
= extract32(ctx
->opcode
, 4, 4);
14417 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
14420 case R6_JRC16
: /* JRCADDIUSP */
14421 if ((ctx
->opcode
>> 4) & 1) {
14423 int imm
= extract32(ctx
->opcode
, 5, 5);
14424 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14425 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14428 rs
= extract32(ctx
->opcode
, 5, 5);
14429 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
14441 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14442 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14443 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
14444 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14448 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
14451 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
14455 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
14456 int offset
= extract32(ctx
->opcode
, 4, 4);
14457 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
14460 case JALRC16
: /* BREAK16, SDBBP16 */
14461 switch (ctx
->opcode
& 0x3f) {
14463 case JALRC16
+ 0x20:
14465 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
14470 generate_exception(ctx
, EXCP_BREAK
);
14474 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
14475 gen_helper_do_semihosting(cpu_env
);
14477 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14478 generate_exception(ctx
, EXCP_RI
);
14480 generate_exception(ctx
, EXCP_DBp
);
14487 generate_exception(ctx
, EXCP_RI
);
14492 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
14494 TCGv t0
= tcg_temp_new();
14495 TCGv t1
= tcg_temp_new();
14497 gen_load_gpr(t0
, base
);
14500 gen_load_gpr(t1
, index
);
14501 tcg_gen_shli_tl(t1
, t1
, 2);
14502 gen_op_addr_add(ctx
, t0
, t1
, t0
);
14505 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14506 gen_store_gpr(t1
, rd
);
14512 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
14513 int base
, int16_t offset
)
14517 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
14518 generate_exception_end(ctx
, EXCP_RI
);
14522 t0
= tcg_temp_new();
14523 t1
= tcg_temp_new();
14525 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14530 generate_exception_end(ctx
, EXCP_RI
);
14533 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14534 gen_store_gpr(t1
, rd
);
14535 tcg_gen_movi_tl(t1
, 4);
14536 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14537 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
14538 gen_store_gpr(t1
, rd
+1);
14541 gen_load_gpr(t1
, rd
);
14542 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14543 tcg_gen_movi_tl(t1
, 4);
14544 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14545 gen_load_gpr(t1
, rd
+1);
14546 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
14548 #ifdef TARGET_MIPS64
14551 generate_exception_end(ctx
, EXCP_RI
);
14554 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14555 gen_store_gpr(t1
, rd
);
14556 tcg_gen_movi_tl(t1
, 8);
14557 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14558 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14559 gen_store_gpr(t1
, rd
+1);
14562 gen_load_gpr(t1
, rd
);
14563 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14564 tcg_gen_movi_tl(t1
, 8);
14565 gen_op_addr_add(ctx
, t0
, t0
, t1
);
14566 gen_load_gpr(t1
, rd
+1);
14567 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
14575 static void gen_sync(int stype
)
14577 TCGBar tcg_mo
= TCG_BAR_SC
;
14580 case 0x4: /* SYNC_WMB */
14581 tcg_mo
|= TCG_MO_ST_ST
;
14583 case 0x10: /* SYNC_MB */
14584 tcg_mo
|= TCG_MO_ALL
;
14586 case 0x11: /* SYNC_ACQUIRE */
14587 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
14589 case 0x12: /* SYNC_RELEASE */
14590 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
14592 case 0x13: /* SYNC_RMB */
14593 tcg_mo
|= TCG_MO_LD_LD
;
14596 tcg_mo
|= TCG_MO_ALL
;
14600 tcg_gen_mb(tcg_mo
);
14603 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
14605 int extension
= (ctx
->opcode
>> 6) & 0x3f;
14606 int minor
= (ctx
->opcode
>> 12) & 0xf;
14607 uint32_t mips32_op
;
14609 switch (extension
) {
14611 mips32_op
= OPC_TEQ
;
14614 mips32_op
= OPC_TGE
;
14617 mips32_op
= OPC_TGEU
;
14620 mips32_op
= OPC_TLT
;
14623 mips32_op
= OPC_TLTU
;
14626 mips32_op
= OPC_TNE
;
14628 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
14630 #ifndef CONFIG_USER_ONLY
14633 check_cp0_enabled(ctx
);
14635 /* Treat as NOP. */
14638 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
14642 check_cp0_enabled(ctx
);
14644 TCGv t0
= tcg_temp_new();
14646 gen_load_gpr(t0
, rt
);
14647 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
14653 switch (minor
& 3) {
14655 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14658 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14661 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14664 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14667 goto pool32axf_invalid
;
14671 switch (minor
& 3) {
14673 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14676 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
14679 goto pool32axf_invalid
;
14685 check_insn(ctx
, ISA_MIPS32R6
);
14686 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
14689 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
14692 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
14695 mips32_op
= OPC_CLO
;
14698 mips32_op
= OPC_CLZ
;
14700 check_insn(ctx
, ISA_MIPS32
);
14701 gen_cl(ctx
, mips32_op
, rt
, rs
);
14704 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14705 gen_rdhwr(ctx
, rt
, rs
, 0);
14708 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
14711 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14712 mips32_op
= OPC_MULT
;
14715 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14716 mips32_op
= OPC_MULTU
;
14719 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14720 mips32_op
= OPC_DIV
;
14723 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14724 mips32_op
= OPC_DIVU
;
14727 check_insn(ctx
, ISA_MIPS32
);
14728 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
14731 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14732 mips32_op
= OPC_MADD
;
14735 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14736 mips32_op
= OPC_MADDU
;
14739 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14740 mips32_op
= OPC_MSUB
;
14743 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14744 mips32_op
= OPC_MSUBU
;
14746 check_insn(ctx
, ISA_MIPS32
);
14747 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
14750 goto pool32axf_invalid
;
14761 generate_exception_err(ctx
, EXCP_CpU
, 2);
14764 goto pool32axf_invalid
;
14769 case JALR
: /* JALRC */
14770 case JALR_HB
: /* JALRC_HB */
14771 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14772 /* JALRC, JALRC_HB */
14773 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
14775 /* JALR, JALR_HB */
14776 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
14777 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14782 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14783 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
14784 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14787 goto pool32axf_invalid
;
14793 check_cp0_enabled(ctx
);
14794 check_insn(ctx
, ISA_MIPS32R2
);
14795 gen_load_srsgpr(rs
, rt
);
14798 check_cp0_enabled(ctx
);
14799 check_insn(ctx
, ISA_MIPS32R2
);
14800 gen_store_srsgpr(rs
, rt
);
14803 goto pool32axf_invalid
;
14806 #ifndef CONFIG_USER_ONLY
14810 mips32_op
= OPC_TLBP
;
14813 mips32_op
= OPC_TLBR
;
14816 mips32_op
= OPC_TLBWI
;
14819 mips32_op
= OPC_TLBWR
;
14822 mips32_op
= OPC_TLBINV
;
14825 mips32_op
= OPC_TLBINVF
;
14828 mips32_op
= OPC_WAIT
;
14831 mips32_op
= OPC_DERET
;
14834 mips32_op
= OPC_ERET
;
14836 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
14839 goto pool32axf_invalid
;
14845 check_cp0_enabled(ctx
);
14847 TCGv t0
= tcg_temp_new();
14849 save_cpu_state(ctx
, 1);
14850 gen_helper_di(t0
, cpu_env
);
14851 gen_store_gpr(t0
, rs
);
14852 /* Stop translation as we may have switched the execution mode */
14853 ctx
->base
.is_jmp
= DISAS_STOP
;
14858 check_cp0_enabled(ctx
);
14860 TCGv t0
= tcg_temp_new();
14862 save_cpu_state(ctx
, 1);
14863 gen_helper_ei(t0
, cpu_env
);
14864 gen_store_gpr(t0
, rs
);
14865 /* DISAS_STOP isn't sufficient, we need to ensure we break out
14866 of translated code to check for pending interrupts. */
14867 gen_save_pc(ctx
->base
.pc_next
+ 4);
14868 ctx
->base
.is_jmp
= DISAS_EXIT
;
14873 goto pool32axf_invalid
;
14880 gen_sync(extract32(ctx
->opcode
, 16, 5));
14883 generate_exception_end(ctx
, EXCP_SYSCALL
);
14886 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
14887 gen_helper_do_semihosting(cpu_env
);
14889 check_insn(ctx
, ISA_MIPS32
);
14890 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14891 generate_exception_end(ctx
, EXCP_RI
);
14893 generate_exception_end(ctx
, EXCP_DBp
);
14898 goto pool32axf_invalid
;
14902 switch (minor
& 3) {
14904 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
14907 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
14910 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
14913 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
14916 goto pool32axf_invalid
;
14920 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14923 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
14926 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
14929 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
14932 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
14935 goto pool32axf_invalid
;
14940 MIPS_INVAL("pool32axf");
14941 generate_exception_end(ctx
, EXCP_RI
);
14946 /* Values for microMIPS fmt field. Variable-width, depending on which
14947 formats the instruction supports. */
14966 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
14968 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
14969 uint32_t mips32_op
;
14971 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
14972 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
14973 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
14975 switch (extension
) {
14976 case FLOAT_1BIT_FMT(CFC1
, 0):
14977 mips32_op
= OPC_CFC1
;
14979 case FLOAT_1BIT_FMT(CTC1
, 0):
14980 mips32_op
= OPC_CTC1
;
14982 case FLOAT_1BIT_FMT(MFC1
, 0):
14983 mips32_op
= OPC_MFC1
;
14985 case FLOAT_1BIT_FMT(MTC1
, 0):
14986 mips32_op
= OPC_MTC1
;
14988 case FLOAT_1BIT_FMT(MFHC1
, 0):
14989 mips32_op
= OPC_MFHC1
;
14991 case FLOAT_1BIT_FMT(MTHC1
, 0):
14992 mips32_op
= OPC_MTHC1
;
14994 gen_cp1(ctx
, mips32_op
, rt
, rs
);
14997 /* Reciprocal square root */
14998 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
14999 mips32_op
= OPC_RSQRT_S
;
15001 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15002 mips32_op
= OPC_RSQRT_D
;
15006 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15007 mips32_op
= OPC_SQRT_S
;
15009 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15010 mips32_op
= OPC_SQRT_D
;
15014 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15015 mips32_op
= OPC_RECIP_S
;
15017 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15018 mips32_op
= OPC_RECIP_D
;
15022 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15023 mips32_op
= OPC_FLOOR_L_S
;
15025 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15026 mips32_op
= OPC_FLOOR_L_D
;
15028 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15029 mips32_op
= OPC_FLOOR_W_S
;
15031 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15032 mips32_op
= OPC_FLOOR_W_D
;
15036 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15037 mips32_op
= OPC_CEIL_L_S
;
15039 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15040 mips32_op
= OPC_CEIL_L_D
;
15042 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15043 mips32_op
= OPC_CEIL_W_S
;
15045 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15046 mips32_op
= OPC_CEIL_W_D
;
15050 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15051 mips32_op
= OPC_TRUNC_L_S
;
15053 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15054 mips32_op
= OPC_TRUNC_L_D
;
15056 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15057 mips32_op
= OPC_TRUNC_W_S
;
15059 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15060 mips32_op
= OPC_TRUNC_W_D
;
15064 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15065 mips32_op
= OPC_ROUND_L_S
;
15067 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15068 mips32_op
= OPC_ROUND_L_D
;
15070 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15071 mips32_op
= OPC_ROUND_W_S
;
15073 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15074 mips32_op
= OPC_ROUND_W_D
;
15077 /* Integer to floating-point conversion */
15078 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15079 mips32_op
= OPC_CVT_L_S
;
15081 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15082 mips32_op
= OPC_CVT_L_D
;
15084 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15085 mips32_op
= OPC_CVT_W_S
;
15087 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15088 mips32_op
= OPC_CVT_W_D
;
15091 /* Paired-foo conversions */
15092 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15093 mips32_op
= OPC_CVT_S_PL
;
15095 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15096 mips32_op
= OPC_CVT_S_PU
;
15098 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15099 mips32_op
= OPC_CVT_PW_PS
;
15101 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15102 mips32_op
= OPC_CVT_PS_PW
;
15105 /* Floating-point moves */
15106 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15107 mips32_op
= OPC_MOV_S
;
15109 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15110 mips32_op
= OPC_MOV_D
;
15112 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15113 mips32_op
= OPC_MOV_PS
;
15116 /* Absolute value */
15117 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15118 mips32_op
= OPC_ABS_S
;
15120 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15121 mips32_op
= OPC_ABS_D
;
15123 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15124 mips32_op
= OPC_ABS_PS
;
15128 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15129 mips32_op
= OPC_NEG_S
;
15131 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15132 mips32_op
= OPC_NEG_D
;
15134 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15135 mips32_op
= OPC_NEG_PS
;
15138 /* Reciprocal square root step */
15139 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15140 mips32_op
= OPC_RSQRT1_S
;
15142 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15143 mips32_op
= OPC_RSQRT1_D
;
15145 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15146 mips32_op
= OPC_RSQRT1_PS
;
15149 /* Reciprocal step */
15150 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15151 mips32_op
= OPC_RECIP1_S
;
15153 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15154 mips32_op
= OPC_RECIP1_S
;
15156 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15157 mips32_op
= OPC_RECIP1_PS
;
15160 /* Conversions from double */
15161 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15162 mips32_op
= OPC_CVT_D_S
;
15164 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15165 mips32_op
= OPC_CVT_D_W
;
15167 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15168 mips32_op
= OPC_CVT_D_L
;
15171 /* Conversions from single */
15172 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15173 mips32_op
= OPC_CVT_S_D
;
15175 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15176 mips32_op
= OPC_CVT_S_W
;
15178 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15179 mips32_op
= OPC_CVT_S_L
;
15181 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15184 /* Conditional moves on floating-point codes */
15185 case COND_FLOAT_MOV(MOVT
, 0):
15186 case COND_FLOAT_MOV(MOVT
, 1):
15187 case COND_FLOAT_MOV(MOVT
, 2):
15188 case COND_FLOAT_MOV(MOVT
, 3):
15189 case COND_FLOAT_MOV(MOVT
, 4):
15190 case COND_FLOAT_MOV(MOVT
, 5):
15191 case COND_FLOAT_MOV(MOVT
, 6):
15192 case COND_FLOAT_MOV(MOVT
, 7):
15193 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15194 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15196 case COND_FLOAT_MOV(MOVF
, 0):
15197 case COND_FLOAT_MOV(MOVF
, 1):
15198 case COND_FLOAT_MOV(MOVF
, 2):
15199 case COND_FLOAT_MOV(MOVF
, 3):
15200 case COND_FLOAT_MOV(MOVF
, 4):
15201 case COND_FLOAT_MOV(MOVF
, 5):
15202 case COND_FLOAT_MOV(MOVF
, 6):
15203 case COND_FLOAT_MOV(MOVF
, 7):
15204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15205 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15208 MIPS_INVAL("pool32fxf");
15209 generate_exception_end(ctx
, EXCP_RI
);
15214 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15218 int rt
, rs
, rd
, rr
;
15220 uint32_t op
, minor
, minor2
, mips32_op
;
15221 uint32_t cond
, fmt
, cc
;
15223 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15224 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15226 rt
= (ctx
->opcode
>> 21) & 0x1f;
15227 rs
= (ctx
->opcode
>> 16) & 0x1f;
15228 rd
= (ctx
->opcode
>> 11) & 0x1f;
15229 rr
= (ctx
->opcode
>> 6) & 0x1f;
15230 imm
= (int16_t) ctx
->opcode
;
15232 op
= (ctx
->opcode
>> 26) & 0x3f;
15235 minor
= ctx
->opcode
& 0x3f;
15238 minor
= (ctx
->opcode
>> 6) & 0xf;
15241 mips32_op
= OPC_SLL
;
15244 mips32_op
= OPC_SRA
;
15247 mips32_op
= OPC_SRL
;
15250 mips32_op
= OPC_ROTR
;
15252 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15255 check_insn(ctx
, ISA_MIPS32R6
);
15256 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15259 check_insn(ctx
, ISA_MIPS32R6
);
15260 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15263 check_insn(ctx
, ISA_MIPS32R6
);
15264 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15267 goto pool32a_invalid
;
15271 minor
= (ctx
->opcode
>> 6) & 0xf;
15275 mips32_op
= OPC_ADD
;
15278 mips32_op
= OPC_ADDU
;
15281 mips32_op
= OPC_SUB
;
15284 mips32_op
= OPC_SUBU
;
15287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15288 mips32_op
= OPC_MUL
;
15290 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15294 mips32_op
= OPC_SLLV
;
15297 mips32_op
= OPC_SRLV
;
15300 mips32_op
= OPC_SRAV
;
15303 mips32_op
= OPC_ROTRV
;
15305 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15307 /* Logical operations */
15309 mips32_op
= OPC_AND
;
15312 mips32_op
= OPC_OR
;
15315 mips32_op
= OPC_NOR
;
15318 mips32_op
= OPC_XOR
;
15320 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15322 /* Set less than */
15324 mips32_op
= OPC_SLT
;
15327 mips32_op
= OPC_SLTU
;
15329 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15332 goto pool32a_invalid
;
15336 minor
= (ctx
->opcode
>> 6) & 0xf;
15338 /* Conditional moves */
15339 case MOVN
: /* MUL */
15340 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15342 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15345 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15348 case MOVZ
: /* MUH */
15349 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15351 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15354 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15358 check_insn(ctx
, ISA_MIPS32R6
);
15359 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15362 check_insn(ctx
, ISA_MIPS32R6
);
15363 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15365 case LWXS
: /* DIV */
15366 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15368 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
15371 gen_ldxs(ctx
, rs
, rt
, rd
);
15375 check_insn(ctx
, ISA_MIPS32R6
);
15376 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
15379 check_insn(ctx
, ISA_MIPS32R6
);
15380 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
15383 check_insn(ctx
, ISA_MIPS32R6
);
15384 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
15387 goto pool32a_invalid
;
15391 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
15394 check_insn(ctx
, ISA_MIPS32R6
);
15395 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
15396 extract32(ctx
->opcode
, 9, 2));
15399 check_insn(ctx
, ISA_MIPS32R6
);
15400 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
15403 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
15406 gen_pool32axf(env
, ctx
, rt
, rs
);
15409 generate_exception_end(ctx
, EXCP_BREAK
);
15412 check_insn(ctx
, ISA_MIPS32R6
);
15413 generate_exception_end(ctx
, EXCP_RI
);
15417 MIPS_INVAL("pool32a");
15418 generate_exception_end(ctx
, EXCP_RI
);
15423 minor
= (ctx
->opcode
>> 12) & 0xf;
15426 check_cp0_enabled(ctx
);
15427 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15428 gen_cache_operation(ctx
, rt
, rs
, imm
);
15433 /* COP2: Not implemented. */
15434 generate_exception_err(ctx
, EXCP_CpU
, 2);
15436 #ifdef TARGET_MIPS64
15439 check_insn(ctx
, ISA_MIPS3
);
15440 check_mips_64(ctx
);
15445 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15447 #ifdef TARGET_MIPS64
15450 check_insn(ctx
, ISA_MIPS3
);
15451 check_mips_64(ctx
);
15456 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
15459 MIPS_INVAL("pool32b");
15460 generate_exception_end(ctx
, EXCP_RI
);
15465 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15466 minor
= ctx
->opcode
& 0x3f;
15467 check_cp1_enabled(ctx
);
15470 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15471 mips32_op
= OPC_ALNV_PS
;
15474 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15475 mips32_op
= OPC_MADD_S
;
15478 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15479 mips32_op
= OPC_MADD_D
;
15482 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15483 mips32_op
= OPC_MADD_PS
;
15486 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15487 mips32_op
= OPC_MSUB_S
;
15490 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15491 mips32_op
= OPC_MSUB_D
;
15494 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15495 mips32_op
= OPC_MSUB_PS
;
15498 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15499 mips32_op
= OPC_NMADD_S
;
15502 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15503 mips32_op
= OPC_NMADD_D
;
15506 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15507 mips32_op
= OPC_NMADD_PS
;
15510 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15511 mips32_op
= OPC_NMSUB_S
;
15514 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15515 mips32_op
= OPC_NMSUB_D
;
15518 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15519 mips32_op
= OPC_NMSUB_PS
;
15521 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
15523 case CABS_COND_FMT
:
15524 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15525 cond
= (ctx
->opcode
>> 6) & 0xf;
15526 cc
= (ctx
->opcode
>> 13) & 0x7;
15527 fmt
= (ctx
->opcode
>> 10) & 0x3;
15530 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
15533 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
15536 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
15539 goto pool32f_invalid
;
15543 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15544 cond
= (ctx
->opcode
>> 6) & 0xf;
15545 cc
= (ctx
->opcode
>> 13) & 0x7;
15546 fmt
= (ctx
->opcode
>> 10) & 0x3;
15549 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
15552 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
15555 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
15558 goto pool32f_invalid
;
15562 check_insn(ctx
, ISA_MIPS32R6
);
15563 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15566 check_insn(ctx
, ISA_MIPS32R6
);
15567 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
15570 gen_pool32fxf(ctx
, rt
, rs
);
15574 switch ((ctx
->opcode
>> 6) & 0x7) {
15576 mips32_op
= OPC_PLL_PS
;
15579 mips32_op
= OPC_PLU_PS
;
15582 mips32_op
= OPC_PUL_PS
;
15585 mips32_op
= OPC_PUU_PS
;
15588 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15589 mips32_op
= OPC_CVT_PS_S
;
15591 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15594 goto pool32f_invalid
;
15598 check_insn(ctx
, ISA_MIPS32R6
);
15599 switch ((ctx
->opcode
>> 9) & 0x3) {
15601 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
15604 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
15607 goto pool32f_invalid
;
15612 switch ((ctx
->opcode
>> 6) & 0x7) {
15614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15615 mips32_op
= OPC_LWXC1
;
15618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15619 mips32_op
= OPC_SWXC1
;
15622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15623 mips32_op
= OPC_LDXC1
;
15626 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15627 mips32_op
= OPC_SDXC1
;
15630 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15631 mips32_op
= OPC_LUXC1
;
15634 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15635 mips32_op
= OPC_SUXC1
;
15637 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
15640 goto pool32f_invalid
;
15644 check_insn(ctx
, ISA_MIPS32R6
);
15645 switch ((ctx
->opcode
>> 9) & 0x3) {
15647 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
15650 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
15653 goto pool32f_invalid
;
15658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15659 fmt
= (ctx
->opcode
>> 9) & 0x3;
15660 switch ((ctx
->opcode
>> 6) & 0x7) {
15664 mips32_op
= OPC_RSQRT2_S
;
15667 mips32_op
= OPC_RSQRT2_D
;
15670 mips32_op
= OPC_RSQRT2_PS
;
15673 goto pool32f_invalid
;
15679 mips32_op
= OPC_RECIP2_S
;
15682 mips32_op
= OPC_RECIP2_D
;
15685 mips32_op
= OPC_RECIP2_PS
;
15688 goto pool32f_invalid
;
15692 mips32_op
= OPC_ADDR_PS
;
15695 mips32_op
= OPC_MULR_PS
;
15697 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15700 goto pool32f_invalid
;
15704 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
15705 cc
= (ctx
->opcode
>> 13) & 0x7;
15706 fmt
= (ctx
->opcode
>> 9) & 0x3;
15707 switch ((ctx
->opcode
>> 6) & 0x7) {
15708 case MOVF_FMT
: /* RINT_FMT */
15709 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15713 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
15716 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
15719 goto pool32f_invalid
;
15725 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
15728 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
15732 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
15735 goto pool32f_invalid
;
15739 case MOVT_FMT
: /* CLASS_FMT */
15740 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15744 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
15747 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
15750 goto pool32f_invalid
;
15756 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
15759 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
15763 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
15766 goto pool32f_invalid
;
15771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15774 goto pool32f_invalid
;
15777 #define FINSN_3ARG_SDPS(prfx) \
15778 switch ((ctx->opcode >> 8) & 0x3) { \
15780 mips32_op = OPC_##prfx##_S; \
15783 mips32_op = OPC_##prfx##_D; \
15785 case FMT_SDPS_PS: \
15787 mips32_op = OPC_##prfx##_PS; \
15790 goto pool32f_invalid; \
15793 check_insn(ctx
, ISA_MIPS32R6
);
15794 switch ((ctx
->opcode
>> 9) & 0x3) {
15796 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
15799 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
15802 goto pool32f_invalid
;
15806 check_insn(ctx
, ISA_MIPS32R6
);
15807 switch ((ctx
->opcode
>> 9) & 0x3) {
15809 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
15812 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
15815 goto pool32f_invalid
;
15819 /* regular FP ops */
15820 switch ((ctx
->opcode
>> 6) & 0x3) {
15822 FINSN_3ARG_SDPS(ADD
);
15825 FINSN_3ARG_SDPS(SUB
);
15828 FINSN_3ARG_SDPS(MUL
);
15831 fmt
= (ctx
->opcode
>> 8) & 0x3;
15833 mips32_op
= OPC_DIV_D
;
15834 } else if (fmt
== 0) {
15835 mips32_op
= OPC_DIV_S
;
15837 goto pool32f_invalid
;
15841 goto pool32f_invalid
;
15846 switch ((ctx
->opcode
>> 6) & 0x7) {
15847 case MOVN_FMT
: /* SELEQZ_FMT */
15848 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15850 switch ((ctx
->opcode
>> 9) & 0x3) {
15852 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
15855 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
15858 goto pool32f_invalid
;
15862 FINSN_3ARG_SDPS(MOVN
);
15866 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15867 FINSN_3ARG_SDPS(MOVN
);
15869 case MOVZ_FMT
: /* SELNEZ_FMT */
15870 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15872 switch ((ctx
->opcode
>> 9) & 0x3) {
15874 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
15877 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
15880 goto pool32f_invalid
;
15884 FINSN_3ARG_SDPS(MOVZ
);
15888 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15889 FINSN_3ARG_SDPS(MOVZ
);
15892 check_insn(ctx
, ISA_MIPS32R6
);
15893 switch ((ctx
->opcode
>> 9) & 0x3) {
15895 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
15898 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
15901 goto pool32f_invalid
;
15905 check_insn(ctx
, ISA_MIPS32R6
);
15906 switch ((ctx
->opcode
>> 9) & 0x3) {
15908 mips32_op
= OPC_MADDF_S
;
15911 mips32_op
= OPC_MADDF_D
;
15914 goto pool32f_invalid
;
15918 check_insn(ctx
, ISA_MIPS32R6
);
15919 switch ((ctx
->opcode
>> 9) & 0x3) {
15921 mips32_op
= OPC_MSUBF_S
;
15924 mips32_op
= OPC_MSUBF_D
;
15927 goto pool32f_invalid
;
15931 goto pool32f_invalid
;
15935 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15939 MIPS_INVAL("pool32f");
15940 generate_exception_end(ctx
, EXCP_RI
);
15944 generate_exception_err(ctx
, EXCP_CpU
, 1);
15948 minor
= (ctx
->opcode
>> 21) & 0x1f;
15951 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15952 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
15955 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15956 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
15957 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15960 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15961 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
15962 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15965 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15966 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
15969 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15970 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
15971 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15974 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15975 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
15976 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15980 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
15983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15984 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
15988 case TLTI
: /* BC1EQZC */
15989 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15991 check_cp1_enabled(ctx
);
15992 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
15995 mips32_op
= OPC_TLTI
;
15999 case TGEI
: /* BC1NEZC */
16000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16002 check_cp1_enabled(ctx
);
16003 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16006 mips32_op
= OPC_TGEI
;
16011 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16012 mips32_op
= OPC_TLTIU
;
16015 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16016 mips32_op
= OPC_TGEIU
;
16018 case TNEI
: /* SYNCI */
16019 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16021 /* Break the TB to be able to sync copied instructions
16023 ctx
->base
.is_jmp
= DISAS_STOP
;
16026 mips32_op
= OPC_TNEI
;
16031 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16032 mips32_op
= OPC_TEQI
;
16034 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16039 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16040 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16041 4, rs
, 0, imm
<< 1, 0);
16042 /* Compact branches don't have a delay slot, so just let
16043 the normal delay slot handling take us to the branch
16047 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16048 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16051 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16052 /* Break the TB to be able to sync copied instructions
16054 ctx
->base
.is_jmp
= DISAS_STOP
;
16058 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16059 /* COP2: Not implemented. */
16060 generate_exception_err(ctx
, EXCP_CpU
, 2);
16063 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16064 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16067 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16068 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16071 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16072 mips32_op
= OPC_BC1FANY4
;
16075 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16076 mips32_op
= OPC_BC1TANY4
;
16079 check_insn(ctx
, ASE_MIPS3D
);
16082 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16083 check_cp1_enabled(ctx
);
16084 gen_compute_branch1(ctx
, mips32_op
,
16085 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16087 generate_exception_err(ctx
, EXCP_CpU
, 1);
16092 /* MIPS DSP: not implemented */
16095 MIPS_INVAL("pool32i");
16096 generate_exception_end(ctx
, EXCP_RI
);
16101 minor
= (ctx
->opcode
>> 12) & 0xf;
16102 offset
= sextract32(ctx
->opcode
, 0,
16103 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16107 mips32_op
= OPC_LWL
;
16110 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16111 mips32_op
= OPC_SWL
;
16114 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16115 mips32_op
= OPC_LWR
;
16118 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16119 mips32_op
= OPC_SWR
;
16121 #if defined(TARGET_MIPS64)
16123 check_insn(ctx
, ISA_MIPS3
);
16124 check_mips_64(ctx
);
16125 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16126 mips32_op
= OPC_LDL
;
16129 check_insn(ctx
, ISA_MIPS3
);
16130 check_mips_64(ctx
);
16131 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16132 mips32_op
= OPC_SDL
;
16135 check_insn(ctx
, ISA_MIPS3
);
16136 check_mips_64(ctx
);
16137 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16138 mips32_op
= OPC_LDR
;
16141 check_insn(ctx
, ISA_MIPS3
);
16142 check_mips_64(ctx
);
16143 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16144 mips32_op
= OPC_SDR
;
16147 check_insn(ctx
, ISA_MIPS3
);
16148 check_mips_64(ctx
);
16149 mips32_op
= OPC_LWU
;
16152 check_insn(ctx
, ISA_MIPS3
);
16153 check_mips_64(ctx
);
16154 mips32_op
= OPC_LLD
;
16158 mips32_op
= OPC_LL
;
16161 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16164 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16167 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16169 #if defined(TARGET_MIPS64)
16171 check_insn(ctx
, ISA_MIPS3
);
16172 check_mips_64(ctx
);
16173 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16178 MIPS_INVAL("pool32c ld-eva");
16179 generate_exception_end(ctx
, EXCP_RI
);
16182 check_cp0_enabled(ctx
);
16184 minor2
= (ctx
->opcode
>> 9) & 0x7;
16185 offset
= sextract32(ctx
->opcode
, 0, 9);
16188 mips32_op
= OPC_LBUE
;
16191 mips32_op
= OPC_LHUE
;
16194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16195 mips32_op
= OPC_LWLE
;
16198 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16199 mips32_op
= OPC_LWRE
;
16202 mips32_op
= OPC_LBE
;
16205 mips32_op
= OPC_LHE
;
16208 mips32_op
= OPC_LLE
;
16211 mips32_op
= OPC_LWE
;
16217 MIPS_INVAL("pool32c st-eva");
16218 generate_exception_end(ctx
, EXCP_RI
);
16221 check_cp0_enabled(ctx
);
16223 minor2
= (ctx
->opcode
>> 9) & 0x7;
16224 offset
= sextract32(ctx
->opcode
, 0, 9);
16227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16228 mips32_op
= OPC_SWLE
;
16231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16232 mips32_op
= OPC_SWRE
;
16235 /* Treat as no-op */
16236 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16237 /* hint codes 24-31 are reserved and signal RI */
16238 generate_exception(ctx
, EXCP_RI
);
16242 /* Treat as no-op */
16243 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16244 gen_cache_operation(ctx
, rt
, rs
, offset
);
16248 mips32_op
= OPC_SBE
;
16251 mips32_op
= OPC_SHE
;
16254 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16257 mips32_op
= OPC_SWE
;
16262 /* Treat as no-op */
16263 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16264 /* hint codes 24-31 are reserved and signal RI */
16265 generate_exception(ctx
, EXCP_RI
);
16269 MIPS_INVAL("pool32c");
16270 generate_exception_end(ctx
, EXCP_RI
);
16274 case ADDI32
: /* AUI, LUI */
16275 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16277 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16280 mips32_op
= OPC_ADDI
;
16285 mips32_op
= OPC_ADDIU
;
16287 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16290 /* Logical operations */
16292 mips32_op
= OPC_ORI
;
16295 mips32_op
= OPC_XORI
;
16298 mips32_op
= OPC_ANDI
;
16300 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16303 /* Set less than immediate */
16305 mips32_op
= OPC_SLTI
;
16308 mips32_op
= OPC_SLTIU
;
16310 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16313 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16314 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16315 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16316 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16318 case JALS32
: /* BOVC, BEQC, BEQZALC */
16319 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16322 mips32_op
= OPC_BOVC
;
16323 } else if (rs
< rt
&& rs
== 0) {
16325 mips32_op
= OPC_BEQZALC
;
16328 mips32_op
= OPC_BEQC
;
16330 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16333 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16334 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16335 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16338 case BEQ32
: /* BC */
16339 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16341 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16342 sextract32(ctx
->opcode
<< 1, 0, 27));
16345 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16348 case BNE32
: /* BALC */
16349 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16351 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16352 sextract32(ctx
->opcode
<< 1, 0, 27));
16355 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16358 case J32
: /* BGTZC, BLTZC, BLTC */
16359 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16360 if (rs
== 0 && rt
!= 0) {
16362 mips32_op
= OPC_BGTZC
;
16363 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16365 mips32_op
= OPC_BLTZC
;
16368 mips32_op
= OPC_BLTC
;
16370 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16373 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
16374 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16377 case JAL32
: /* BLEZC, BGEZC, BGEC */
16378 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16379 if (rs
== 0 && rt
!= 0) {
16381 mips32_op
= OPC_BLEZC
;
16382 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16384 mips32_op
= OPC_BGEZC
;
16387 mips32_op
= OPC_BGEC
;
16389 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16392 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
16393 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
16394 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16397 /* Floating point (COP1) */
16399 mips32_op
= OPC_LWC1
;
16402 mips32_op
= OPC_LDC1
;
16405 mips32_op
= OPC_SWC1
;
16408 mips32_op
= OPC_SDC1
;
16410 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
16412 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16413 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16414 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
16415 switch ((ctx
->opcode
>> 16) & 0x1f) {
16424 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16427 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
16430 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
16440 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
16443 generate_exception(ctx
, EXCP_RI
);
16448 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
16449 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
16451 gen_addiupc(ctx
, reg
, offset
, 0, 0);
16454 case BNVC
: /* BNEC, BNEZALC */
16455 check_insn(ctx
, ISA_MIPS32R6
);
16458 mips32_op
= OPC_BNVC
;
16459 } else if (rs
< rt
&& rs
== 0) {
16461 mips32_op
= OPC_BNEZALC
;
16464 mips32_op
= OPC_BNEC
;
16466 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16468 case R6_BNEZC
: /* JIALC */
16469 check_insn(ctx
, ISA_MIPS32R6
);
16472 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
16473 sextract32(ctx
->opcode
<< 1, 0, 22));
16476 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
16479 case R6_BEQZC
: /* JIC */
16480 check_insn(ctx
, ISA_MIPS32R6
);
16483 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
16484 sextract32(ctx
->opcode
<< 1, 0, 22));
16487 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
16490 case BLEZALC
: /* BGEZALC, BGEUC */
16491 check_insn(ctx
, ISA_MIPS32R6
);
16492 if (rs
== 0 && rt
!= 0) {
16494 mips32_op
= OPC_BLEZALC
;
16495 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16497 mips32_op
= OPC_BGEZALC
;
16500 mips32_op
= OPC_BGEUC
;
16502 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16504 case BGTZALC
: /* BLTZALC, BLTUC */
16505 check_insn(ctx
, ISA_MIPS32R6
);
16506 if (rs
== 0 && rt
!= 0) {
16508 mips32_op
= OPC_BGTZALC
;
16509 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16511 mips32_op
= OPC_BLTZALC
;
16514 mips32_op
= OPC_BLTUC
;
16516 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16518 /* Loads and stores */
16520 mips32_op
= OPC_LB
;
16523 mips32_op
= OPC_LBU
;
16526 mips32_op
= OPC_LH
;
16529 mips32_op
= OPC_LHU
;
16532 mips32_op
= OPC_LW
;
16534 #ifdef TARGET_MIPS64
16536 check_insn(ctx
, ISA_MIPS3
);
16537 check_mips_64(ctx
);
16538 mips32_op
= OPC_LD
;
16541 check_insn(ctx
, ISA_MIPS3
);
16542 check_mips_64(ctx
);
16543 mips32_op
= OPC_SD
;
16547 mips32_op
= OPC_SB
;
16550 mips32_op
= OPC_SH
;
16553 mips32_op
= OPC_SW
;
16556 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
16559 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
16562 generate_exception_end(ctx
, EXCP_RI
);
16567 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
16571 /* make sure instructions are on a halfword boundary */
16572 if (ctx
->base
.pc_next
& 0x1) {
16573 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
16574 generate_exception_end(ctx
, EXCP_AdEL
);
16578 op
= (ctx
->opcode
>> 10) & 0x3f;
16579 /* Enforce properly-sized instructions in a delay slot */
16580 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
16581 switch (op
& 0x7) { /* MSB-3..MSB-5 */
16583 /* POOL32A, POOL32B, POOL32I, POOL32C */
16585 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
16587 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
16589 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
16591 /* LB32, LH32, LWC132, LDC132, LW32 */
16592 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
16593 generate_exception_end(ctx
, EXCP_RI
);
16598 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
16600 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
16602 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
16603 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
16604 generate_exception_end(ctx
, EXCP_RI
);
16614 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16615 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
16616 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
16619 switch (ctx
->opcode
& 0x1) {
16627 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16628 /* In the Release 6 the register number location in
16629 * the instruction encoding has changed.
16631 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
16633 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
16639 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16640 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
16641 int amount
= (ctx
->opcode
>> 1) & 0x7;
16643 amount
= amount
== 0 ? 8 : amount
;
16645 switch (ctx
->opcode
& 0x1) {
16654 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
16658 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16659 gen_pool16c_r6_insn(ctx
);
16661 gen_pool16c_insn(ctx
);
16666 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16667 int rb
= 28; /* GP */
16668 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
16670 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
16674 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16675 if (ctx
->opcode
& 1) {
16676 generate_exception_end(ctx
, EXCP_RI
);
16679 int enc_dest
= uMIPS_RD(ctx
->opcode
);
16680 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
16681 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
16682 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
16687 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16688 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16689 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
16690 offset
= (offset
== 0xf ? -1 : offset
);
16692 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
16697 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16698 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16699 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
16701 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
16706 int rd
= (ctx
->opcode
>> 5) & 0x1f;
16707 int rb
= 29; /* SP */
16708 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
16710 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
16715 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
16716 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16717 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
16719 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
16724 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
16725 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16726 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
16728 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
16733 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
16734 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16735 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
16737 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
16742 int rd
= (ctx
->opcode
>> 5) & 0x1f;
16743 int rb
= 29; /* SP */
16744 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
16746 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
16751 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
16752 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
16753 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
16755 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
16760 int rd
= uMIPS_RD5(ctx
->opcode
);
16761 int rs
= uMIPS_RS5(ctx
->opcode
);
16763 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
16770 switch (ctx
->opcode
& 0x1) {
16780 switch (ctx
->opcode
& 0x1) {
16785 gen_addiur1sp(ctx
);
16789 case B16
: /* BC16 */
16790 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
16791 sextract32(ctx
->opcode
, 0, 10) << 1,
16792 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
16794 case BNEZ16
: /* BNEZC16 */
16795 case BEQZ16
: /* BEQZC16 */
16796 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
16797 mmreg(uMIPS_RD(ctx
->opcode
)),
16798 0, sextract32(ctx
->opcode
, 0, 7) << 1,
16799 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
16804 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
16805 int imm
= ZIMM(ctx
->opcode
, 0, 7);
16807 imm
= (imm
== 0x7f ? -1 : imm
);
16808 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
16814 generate_exception_end(ctx
, EXCP_RI
);
16817 decode_micromips32_opc(env
, ctx
);
16830 /* MAJOR, P16, and P32 pools opcodes */
16834 NM_MOVE_BALC
= 0x02,
16842 NM_P16_SHIFT
= 0x0c,
16860 NM_P_LS_U12
= 0x21,
16870 NM_P16_ADDU
= 0x2c,
16884 NM_MOVEPREV
= 0x3f,
16887 /* POOL32A instruction pool */
16889 NM_POOL32A0
= 0x00,
16890 NM_SPECIAL2
= 0x01,
16893 NM_POOL32A5
= 0x05,
16894 NM_POOL32A7
= 0x07,
16897 /* P.GP.W instruction pool */
16899 NM_ADDIUGP_W
= 0x00,
16904 /* P48I instruction pool */
16908 NM_ADDIUGP48
= 0x02,
16909 NM_ADDIUPC48
= 0x03,
16914 /* P.U12 instruction pool */
16923 NM_ADDIUNEG
= 0x08,
16930 /* POOL32F instruction pool */
16932 NM_POOL32F_0
= 0x00,
16933 NM_POOL32F_3
= 0x03,
16934 NM_POOL32F_5
= 0x05,
16937 /* POOL32S instruction pool */
16939 NM_POOL32S_0
= 0x00,
16940 NM_POOL32S_4
= 0x04,
16943 /* P.LUI instruction pool */
16949 /* P.GP.BH instruction pool */
16954 NM_ADDIUGP_B
= 0x03,
16957 NM_P_GP_CP1
= 0x06,
16960 /* P.LS.U12 instruction pool */
16965 NM_P_PREFU12
= 0x03,
16978 /* P.LS.S9 instruction pool */
16984 NM_P_LS_UAWM
= 0x05,
16987 /* P.BAL instruction pool */
16993 /* P.J instruction pool */
16996 NM_JALRC_HB
= 0x01,
16997 NM_P_BALRSC
= 0x08,
17000 /* P.BR1 instruction pool */
17008 /* P.BR2 instruction pool */
17015 /* P.BRI instruction pool */
17027 /* P16.SHIFT instruction pool */
17033 /* POOL16C instruction pool */
17035 NM_POOL16C_0
= 0x00,
17039 /* P16.A1 instruction pool */
17041 NM_ADDIUR1SP
= 0x01,
17044 /* P16.A2 instruction pool */
17047 NM_P_ADDIURS5
= 0x01,
17050 /* P16.ADDU instruction pool */
17056 /* P16.SR instruction pool */
17059 NM_RESTORE_JRC16
= 0x01,
17062 /* P16.4X4 instruction pool */
17068 /* P16.LB instruction pool */
17075 /* P16.LH instruction pool */
17082 /* P.RI instruction pool */
17085 NM_P_SYSCALL
= 0x01,
17090 /* POOL32A0 instruction pool */
17125 NM_D_E_MT_VPE
= 0x56,
17133 /* POOL32A5 instruction pool */
17135 NM_CMP_EQ_PH
= 0x00,
17136 NM_CMP_LT_PH
= 0x08,
17137 NM_CMP_LE_PH
= 0x10,
17138 NM_CMPGU_EQ_QB
= 0x18,
17139 NM_CMPGU_LT_QB
= 0x20,
17140 NM_CMPGU_LE_QB
= 0x28,
17141 NM_CMPGDU_EQ_QB
= 0x30,
17142 NM_CMPGDU_LT_QB
= 0x38,
17143 NM_CMPGDU_LE_QB
= 0x40,
17144 NM_CMPU_EQ_QB
= 0x48,
17145 NM_CMPU_LT_QB
= 0x50,
17146 NM_CMPU_LE_QB
= 0x58,
17147 NM_ADDQ_S_W
= 0x60,
17148 NM_SUBQ_S_W
= 0x68,
17152 NM_ADDQ_S_PH
= 0x01,
17153 NM_ADDQH_R_PH
= 0x09,
17154 NM_ADDQH_R_W
= 0x11,
17155 NM_ADDU_S_QB
= 0x19,
17156 NM_ADDU_S_PH
= 0x21,
17157 NM_ADDUH_R_QB
= 0x29,
17158 NM_SHRAV_R_PH
= 0x31,
17159 NM_SHRAV_R_QB
= 0x39,
17160 NM_SUBQ_S_PH
= 0x41,
17161 NM_SUBQH_R_PH
= 0x49,
17162 NM_SUBQH_R_W
= 0x51,
17163 NM_SUBU_S_QB
= 0x59,
17164 NM_SUBU_S_PH
= 0x61,
17165 NM_SUBUH_R_QB
= 0x69,
17166 NM_SHLLV_S_PH
= 0x71,
17167 NM_PRECR_SRA_R_PH_W
= 0x79,
17169 NM_MULEU_S_PH_QBL
= 0x12,
17170 NM_MULEU_S_PH_QBR
= 0x1a,
17171 NM_MULQ_RS_PH
= 0x22,
17172 NM_MULQ_S_PH
= 0x2a,
17173 NM_MULQ_RS_W
= 0x32,
17174 NM_MULQ_S_W
= 0x3a,
17177 NM_SHRAV_R_W
= 0x5a,
17178 NM_SHRLV_PH
= 0x62,
17179 NM_SHRLV_QB
= 0x6a,
17180 NM_SHLLV_QB
= 0x72,
17181 NM_SHLLV_S_W
= 0x7a,
17185 NM_MULEQ_S_W_PHL
= 0x04,
17186 NM_MULEQ_S_W_PHR
= 0x0c,
17188 NM_MUL_S_PH
= 0x05,
17189 NM_PRECR_QB_PH
= 0x0d,
17190 NM_PRECRQ_QB_PH
= 0x15,
17191 NM_PRECRQ_PH_W
= 0x1d,
17192 NM_PRECRQ_RS_PH_W
= 0x25,
17193 NM_PRECRQU_S_QB_PH
= 0x2d,
17194 NM_PACKRL_PH
= 0x35,
17198 NM_SHRA_R_W
= 0x5e,
17199 NM_SHRA_R_PH
= 0x66,
17200 NM_SHLL_S_PH
= 0x76,
17201 NM_SHLL_S_W
= 0x7e,
17206 /* POOL32A7 instruction pool */
17211 NM_POOL32AXF
= 0x07,
17214 /* P.SR instruction pool */
17220 /* P.SHIFT instruction pool */
17228 /* P.ROTX instruction pool */
17233 /* P.INS instruction pool */
17238 /* P.EXT instruction pool */
17243 /* POOL32F_0 (fmt) instruction pool */
17248 NM_SELEQZ_S
= 0x07,
17249 NM_SELEQZ_D
= 0x47,
17253 NM_SELNEZ_S
= 0x0f,
17254 NM_SELNEZ_D
= 0x4f,
17269 /* POOL32F_3 instruction pool */
17273 NM_MINA_FMT
= 0x04,
17274 NM_MAXA_FMT
= 0x05,
17275 NM_POOL32FXF
= 0x07,
17278 /* POOL32F_5 instruction pool */
17280 NM_CMP_CONDN_S
= 0x00,
17281 NM_CMP_CONDN_D
= 0x02,
17284 /* P.GP.LH instruction pool */
17290 /* P.GP.SH instruction pool */
17295 /* P.GP.CP1 instruction pool */
17303 /* P.LS.S0 instruction pool */
17320 NM_P_PREFS9
= 0x03,
17326 /* P.LS.S1 instruction pool */
17328 NM_ASET_ACLR
= 0x02,
17336 /* P.LS.E0 instruction pool */
17352 /* P.PREFE instruction pool */
17358 /* P.LLE instruction pool */
17364 /* P.SCE instruction pool */
17370 /* P.LS.WM instruction pool */
17376 /* P.LS.UAWM instruction pool */
17382 /* P.BR3A instruction pool */
17388 NM_BPOSGE32C
= 0x04,
17391 /* P16.RI instruction pool */
17393 NM_P16_SYSCALL
= 0x01,
17398 /* POOL16C_0 instruction pool */
17400 NM_POOL16C_00
= 0x00,
17403 /* P16.JRC instruction pool */
17409 /* P.SYSCALL instruction pool */
17415 /* P.TRAP instruction pool */
17421 /* P.CMOVE instruction pool */
17427 /* POOL32Axf instruction pool */
17429 NM_POOL32AXF_1
= 0x01,
17430 NM_POOL32AXF_2
= 0x02,
17431 NM_POOL32AXF_4
= 0x04,
17432 NM_POOL32AXF_5
= 0x05,
17433 NM_POOL32AXF_7
= 0x07,
17436 /* POOL32Axf_1 instruction pool */
17438 NM_POOL32AXF_1_0
= 0x00,
17439 NM_POOL32AXF_1_1
= 0x01,
17440 NM_POOL32AXF_1_3
= 0x03,
17441 NM_POOL32AXF_1_4
= 0x04,
17442 NM_POOL32AXF_1_5
= 0x05,
17443 NM_POOL32AXF_1_7
= 0x07,
17446 /* POOL32Axf_2 instruction pool */
17448 NM_POOL32AXF_2_0_7
= 0x00,
17449 NM_POOL32AXF_2_8_15
= 0x01,
17450 NM_POOL32AXF_2_16_23
= 0x02,
17451 NM_POOL32AXF_2_24_31
= 0x03,
17454 /* POOL32Axf_7 instruction pool */
17456 NM_SHRA_R_QB
= 0x0,
17461 /* POOL32Axf_1_0 instruction pool */
17469 /* POOL32Axf_1_1 instruction pool */
17475 /* POOL32Axf_1_3 instruction pool */
17483 /* POOL32Axf_1_4 instruction pool */
17489 /* POOL32Axf_1_5 instruction pool */
17491 NM_MAQ_S_W_PHR
= 0x0,
17492 NM_MAQ_S_W_PHL
= 0x1,
17493 NM_MAQ_SA_W_PHR
= 0x2,
17494 NM_MAQ_SA_W_PHL
= 0x3,
17497 /* POOL32Axf_1_7 instruction pool */
17501 NM_EXTR_RS_W
= 0x2,
17505 /* POOL32Axf_2_0_7 instruction pool */
17508 NM_DPAQ_S_W_PH
= 0x1,
17510 NM_DPSQ_S_W_PH
= 0x3,
17517 /* POOL32Axf_2_8_15 instruction pool */
17519 NM_DPAX_W_PH
= 0x0,
17520 NM_DPAQ_SA_L_W
= 0x1,
17521 NM_DPSX_W_PH
= 0x2,
17522 NM_DPSQ_SA_L_W
= 0x3,
17525 NM_EXTRV_R_W
= 0x7,
17528 /* POOL32Axf_2_16_23 instruction pool */
17530 NM_DPAU_H_QBL
= 0x0,
17531 NM_DPAQX_S_W_PH
= 0x1,
17532 NM_DPSU_H_QBL
= 0x2,
17533 NM_DPSQX_S_W_PH
= 0x3,
17536 NM_MULSA_W_PH
= 0x6,
17537 NM_EXTRV_RS_W
= 0x7,
17540 /* POOL32Axf_2_24_31 instruction pool */
17542 NM_DPAU_H_QBR
= 0x0,
17543 NM_DPAQX_SA_W_PH
= 0x1,
17544 NM_DPSU_H_QBR
= 0x2,
17545 NM_DPSQX_SA_W_PH
= 0x3,
17548 NM_MULSAQ_S_W_PH
= 0x6,
17549 NM_EXTRV_S_H
= 0x7,
17552 /* POOL32Axf_{4, 5} instruction pool */
17571 /* nanoMIPS DSP instructions */
17572 NM_ABSQ_S_QB
= 0x00,
17573 NM_ABSQ_S_PH
= 0x08,
17574 NM_ABSQ_S_W
= 0x10,
17575 NM_PRECEQ_W_PHL
= 0x28,
17576 NM_PRECEQ_W_PHR
= 0x30,
17577 NM_PRECEQU_PH_QBL
= 0x38,
17578 NM_PRECEQU_PH_QBR
= 0x48,
17579 NM_PRECEU_PH_QBL
= 0x58,
17580 NM_PRECEU_PH_QBR
= 0x68,
17581 NM_PRECEQU_PH_QBLA
= 0x39,
17582 NM_PRECEQU_PH_QBRA
= 0x49,
17583 NM_PRECEU_PH_QBLA
= 0x59,
17584 NM_PRECEU_PH_QBRA
= 0x69,
17585 NM_REPLV_PH
= 0x01,
17586 NM_REPLV_QB
= 0x09,
17589 NM_RADDU_W_QB
= 0x78,
17595 /* PP.SR instruction pool */
17599 NM_RESTORE_JRC
= 0x03,
17602 /* P.SR.F instruction pool */
17605 NM_RESTOREF
= 0x01,
17608 /* P16.SYSCALL instruction pool */
17610 NM_SYSCALL16
= 0x00,
17611 NM_HYPCALL16
= 0x01,
17614 /* POOL16C_00 instruction pool */
17622 /* PP.LSX and PP.LSXS instruction pool */
17660 /* ERETx instruction pool */
17666 /* POOL32FxF_{0, 1} insturction pool */
17675 NM_CVT_S_PL
= 0x84,
17676 NM_CVT_S_PU
= 0xa4,
17678 NM_CVT_L_S
= 0x004,
17679 NM_CVT_L_D
= 0x104,
17680 NM_CVT_W_S
= 0x024,
17681 NM_CVT_W_D
= 0x124,
17683 NM_RSQRT_S
= 0x008,
17684 NM_RSQRT_D
= 0x108,
17689 NM_RECIP_S
= 0x048,
17690 NM_RECIP_D
= 0x148,
17692 NM_FLOOR_L_S
= 0x00c,
17693 NM_FLOOR_L_D
= 0x10c,
17695 NM_FLOOR_W_S
= 0x02c,
17696 NM_FLOOR_W_D
= 0x12c,
17698 NM_CEIL_L_S
= 0x04c,
17699 NM_CEIL_L_D
= 0x14c,
17700 NM_CEIL_W_S
= 0x06c,
17701 NM_CEIL_W_D
= 0x16c,
17702 NM_TRUNC_L_S
= 0x08c,
17703 NM_TRUNC_L_D
= 0x18c,
17704 NM_TRUNC_W_S
= 0x0ac,
17705 NM_TRUNC_W_D
= 0x1ac,
17706 NM_ROUND_L_S
= 0x0cc,
17707 NM_ROUND_L_D
= 0x1cc,
17708 NM_ROUND_W_S
= 0x0ec,
17709 NM_ROUND_W_D
= 0x1ec,
17717 NM_CVT_D_S
= 0x04d,
17718 NM_CVT_D_W
= 0x0cd,
17719 NM_CVT_D_L
= 0x14d,
17720 NM_CVT_S_D
= 0x06d,
17721 NM_CVT_S_W
= 0x0ed,
17722 NM_CVT_S_L
= 0x16d,
17725 /* P.LL instruction pool */
17731 /* P.SC instruction pool */
17737 /* P.DVP instruction pool */
17746 * nanoMIPS decoding engine
17751 /* extraction utilities */
17753 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
17754 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
17755 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
17756 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
17757 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
17758 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    /*
     * 3-bit encodings 0..3 select $16..$19 (s0..s3); encodings 4..7
     * select $4..$7 (a0..a3).  Only the low three bits of r are used.
     */
    int enc = r & 0x7;

    return (enc < 4) ? (enc + 16) : enc;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    /*
     * Same mapping as 'gpr3' except that encoding 0 selects $0 ($zero)
     * instead of $16, so a store source of zero can be encoded.
     */
    int enc = r & 0x7;

    if (enc == 0) {
        return 0;
    }
    return (enc < 4) ? (enc + 16) : enc;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    /*
     * 4-bit encodings: 0..3 -> $8..$11 (t0..t3), 4..7 -> $4..$7
     * (a0..a3), 8..15 -> $16..$23 (s0..s7).  Only the low four bits
     * of r are used.
     */
    int enc = r & 0xf;

    if (enc >= 4 && enc <= 7) {
        return enc;
    }
    return enc + 8;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    /*
     * Same mapping as 'gpr4' except that encoding 3 selects $0 ($zero)
     * instead of $11.  Only the low four bits of r are used.
     */
    int enc = r & 0xf;

    if (enc == 3) {
        return 0;
    }
    if (enc >= 4 && enc <= 7) {
        return enc;
    }
    return enc + 8;
}
17795 /* extraction utilities */
17797 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
17798 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
17799 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
17800 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
17801 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
17802 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
17805 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
17807 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
17810 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
17811 uint8_t gp
, uint16_t u
)
17814 TCGv va
= tcg_temp_new();
17815 TCGv t0
= tcg_temp_new();
17817 while (counter
!= count
) {
17818 bool use_gp
= gp
&& (counter
== count
- 1);
17819 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
17820 int this_offset
= -((counter
+ 1) << 2);
17821 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
17822 gen_load_gpr(t0
, this_rt
);
17823 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
17824 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
17828 /* adjust stack pointer */
17829 gen_adjust_sp(ctx
, -u
);
17835 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
17836 uint8_t gp
, uint16_t u
)
17839 TCGv va
= tcg_temp_new();
17840 TCGv t0
= tcg_temp_new();
17842 while (counter
!= count
) {
17843 bool use_gp
= gp
&& (counter
== count
- 1);
17844 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
17845 int this_offset
= u
- ((counter
+ 1) << 2);
17846 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
17847 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
17848 ctx
->default_tcg_memop_mask
);
17849 tcg_gen_ext32s_tl(t0
, t0
);
17850 gen_store_gpr(t0
, this_rt
);
17854 /* adjust stack pointer */
17855 gen_adjust_sp(ctx
, u
);
17861 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
17863 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
17864 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
17866 switch (extract32(ctx
->opcode
, 2, 2)) {
17868 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
17871 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
17874 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
17877 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
17882 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
17884 int rt
= extract32(ctx
->opcode
, 21, 5);
17885 int rs
= extract32(ctx
->opcode
, 16, 5);
17886 int rd
= extract32(ctx
->opcode
, 11, 5);
17888 switch (extract32(ctx
->opcode
, 3, 7)) {
17890 switch (extract32(ctx
->opcode
, 10, 1)) {
17893 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
17897 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
17903 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
17907 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
17910 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
17913 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
17916 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
17919 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
17922 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
17925 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
17928 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
17932 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
17935 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
17938 switch (extract32(ctx
->opcode
, 10, 1)) {
17940 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
17943 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
17948 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
17951 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
17954 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
17957 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
17960 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
17965 #ifndef CONFIG_USER_ONLY
17966 TCGv t0
= tcg_temp_new();
17967 switch (extract32(ctx
->opcode
, 10, 1)) {
17970 check_cp0_enabled(ctx
);
17971 gen_helper_dvp(t0
, cpu_env
);
17972 gen_store_gpr(t0
, rt
);
17977 check_cp0_enabled(ctx
);
17978 gen_helper_evp(t0
, cpu_env
);
17979 gen_store_gpr(t0
, rt
);
17986 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
17991 TCGv t0
= tcg_temp_new();
17992 TCGv t1
= tcg_temp_new();
17993 TCGv t2
= tcg_temp_new();
17995 gen_load_gpr(t1
, rs
);
17996 gen_load_gpr(t2
, rt
);
17997 tcg_gen_add_tl(t0
, t1
, t2
);
17998 tcg_gen_ext32s_tl(t0
, t0
);
17999 tcg_gen_xor_tl(t1
, t1
, t2
);
18000 tcg_gen_xor_tl(t2
, t0
, t2
);
18001 tcg_gen_andc_tl(t1
, t2
, t1
);
18003 /* operands of same sign, result different sign */
18004 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18005 gen_store_gpr(t0
, rd
);
18013 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18016 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18019 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18022 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18025 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18028 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18031 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18034 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18036 #ifndef CONFIG_USER_ONLY
18038 check_cp0_enabled(ctx
);
18040 /* Treat as NOP. */
18043 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18046 check_cp0_enabled(ctx
);
18048 TCGv t0
= tcg_temp_new();
18050 gen_load_gpr(t0
, rt
);
18051 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18055 case NM_D_E_MT_VPE
:
18057 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18058 TCGv t0
= tcg_temp_new();
18065 gen_helper_dmt(t0
);
18066 gen_store_gpr(t0
, rt
);
18067 } else if (rs
== 0) {
18070 gen_helper_dvpe(t0
, cpu_env
);
18071 gen_store_gpr(t0
, rt
);
18073 generate_exception_end(ctx
, EXCP_RI
);
18080 gen_helper_emt(t0
);
18081 gen_store_gpr(t0
, rt
);
18082 } else if (rs
== 0) {
18085 gen_helper_evpe(t0
, cpu_env
);
18086 gen_store_gpr(t0
, rt
);
18088 generate_exception_end(ctx
, EXCP_RI
);
18099 TCGv t0
= tcg_temp_new();
18100 TCGv t1
= tcg_temp_new();
18102 gen_load_gpr(t0
, rt
);
18103 gen_load_gpr(t1
, rs
);
18104 gen_helper_fork(t0
, t1
);
18111 check_cp0_enabled(ctx
);
18113 /* Treat as NOP. */
18116 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18117 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18121 check_cp0_enabled(ctx
);
18122 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18123 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18128 TCGv t0
= tcg_temp_new();
18130 gen_load_gpr(t0
, rs
);
18131 gen_helper_yield(t0
, cpu_env
, t0
);
18132 gen_store_gpr(t0
, rt
);
18138 generate_exception_end(ctx
, EXCP_RI
);
18144 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18145 int ret
, int v1
, int v2
)
18151 t0
= tcg_temp_new_i32();
18153 v0_t
= tcg_temp_new();
18154 v1_t
= tcg_temp_new();
18156 tcg_gen_movi_i32(t0
, v2
>> 3);
18158 gen_load_gpr(v0_t
, ret
);
18159 gen_load_gpr(v1_t
, v1
);
18162 case NM_MAQ_S_W_PHR
:
18164 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18166 case NM_MAQ_S_W_PHL
:
18168 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18170 case NM_MAQ_SA_W_PHR
:
18172 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18174 case NM_MAQ_SA_W_PHL
:
18176 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18179 generate_exception_end(ctx
, EXCP_RI
);
18183 tcg_temp_free_i32(t0
);
18185 tcg_temp_free(v0_t
);
18186 tcg_temp_free(v1_t
);
18190 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18191 int ret
, int v1
, int v2
)
18194 TCGv t0
= tcg_temp_new();
18195 TCGv t1
= tcg_temp_new();
18196 TCGv v0_t
= tcg_temp_new();
18198 gen_load_gpr(v0_t
, v1
);
18201 case NM_POOL32AXF_1_0
:
18203 switch (extract32(ctx
->opcode
, 12, 2)) {
18205 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18208 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18211 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18214 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18218 case NM_POOL32AXF_1_1
:
18220 switch (extract32(ctx
->opcode
, 12, 2)) {
18222 tcg_gen_movi_tl(t0
, v2
);
18223 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18226 tcg_gen_movi_tl(t0
, v2
>> 3);
18227 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18230 generate_exception_end(ctx
, EXCP_RI
);
18234 case NM_POOL32AXF_1_3
:
18236 imm
= extract32(ctx
->opcode
, 14, 7);
18237 switch (extract32(ctx
->opcode
, 12, 2)) {
18239 tcg_gen_movi_tl(t0
, imm
);
18240 gen_helper_rddsp(t0
, t0
, cpu_env
);
18241 gen_store_gpr(t0
, ret
);
18244 gen_load_gpr(t0
, ret
);
18245 tcg_gen_movi_tl(t1
, imm
);
18246 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18249 tcg_gen_movi_tl(t0
, v2
>> 3);
18250 tcg_gen_movi_tl(t1
, v1
);
18251 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18252 gen_store_gpr(t0
, ret
);
18255 tcg_gen_movi_tl(t0
, v2
>> 3);
18256 tcg_gen_movi_tl(t1
, v1
);
18257 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18258 gen_store_gpr(t0
, ret
);
18262 case NM_POOL32AXF_1_4
:
18264 tcg_gen_movi_tl(t0
, v2
>> 2);
18265 switch (extract32(ctx
->opcode
, 12, 1)) {
18267 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18268 gen_store_gpr(t0
, ret
);
18271 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18272 gen_store_gpr(t0
, ret
);
18276 case NM_POOL32AXF_1_5
:
18277 opc
= extract32(ctx
->opcode
, 12, 2);
18278 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18280 case NM_POOL32AXF_1_7
:
18282 tcg_gen_movi_tl(t0
, v2
>> 3);
18283 tcg_gen_movi_tl(t1
, v1
);
18284 switch (extract32(ctx
->opcode
, 12, 2)) {
18286 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18287 gen_store_gpr(t0
, ret
);
18290 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18291 gen_store_gpr(t0
, ret
);
18294 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18295 gen_store_gpr(t0
, ret
);
18298 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18299 gen_store_gpr(t0
, ret
);
18304 generate_exception_end(ctx
, EXCP_RI
);
18310 tcg_temp_free(v0_t
);
18313 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18314 TCGv v0
, TCGv v1
, int rd
)
18318 t0
= tcg_temp_new_i32();
18320 tcg_gen_movi_i32(t0
, rd
>> 3);
18323 case NM_POOL32AXF_2_0_7
:
18324 switch (extract32(ctx
->opcode
, 9, 3)) {
18327 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18329 case NM_DPAQ_S_W_PH
:
18331 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18335 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18337 case NM_DPSQ_S_W_PH
:
18339 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18342 generate_exception_end(ctx
, EXCP_RI
);
18346 case NM_POOL32AXF_2_8_15
:
18347 switch (extract32(ctx
->opcode
, 9, 3)) {
18350 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18352 case NM_DPAQ_SA_L_W
:
18354 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18358 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
18360 case NM_DPSQ_SA_L_W
:
18362 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18365 generate_exception_end(ctx
, EXCP_RI
);
18369 case NM_POOL32AXF_2_16_23
:
18370 switch (extract32(ctx
->opcode
, 9, 3)) {
18371 case NM_DPAU_H_QBL
:
18373 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
18375 case NM_DPAQX_S_W_PH
:
18377 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18379 case NM_DPSU_H_QBL
:
18381 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
18383 case NM_DPSQX_S_W_PH
:
18385 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
18387 case NM_MULSA_W_PH
:
18389 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
18392 generate_exception_end(ctx
, EXCP_RI
);
18396 case NM_POOL32AXF_2_24_31
:
18397 switch (extract32(ctx
->opcode
, 9, 3)) {
18398 case NM_DPAU_H_QBR
:
18400 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
18402 case NM_DPAQX_SA_W_PH
:
18404 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18406 case NM_DPSU_H_QBR
:
18408 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
18410 case NM_DPSQX_SA_W_PH
:
18412 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
18414 case NM_MULSAQ_S_W_PH
:
18416 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18419 generate_exception_end(ctx
, EXCP_RI
);
18424 generate_exception_end(ctx
, EXCP_RI
);
18428 tcg_temp_free_i32(t0
);
18431 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18432 int rt
, int rs
, int rd
)
18435 TCGv t0
= tcg_temp_new();
18436 TCGv t1
= tcg_temp_new();
18437 TCGv v0_t
= tcg_temp_new();
18438 TCGv v1_t
= tcg_temp_new();
18440 gen_load_gpr(v0_t
, rt
);
18441 gen_load_gpr(v1_t
, rs
);
18444 case NM_POOL32AXF_2_0_7
:
18445 switch (extract32(ctx
->opcode
, 9, 3)) {
18447 case NM_DPAQ_S_W_PH
:
18449 case NM_DPSQ_S_W_PH
:
18450 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18455 gen_load_gpr(t0
, rs
);
18457 if (rd
!= 0 && rd
!= 2) {
18458 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
18459 tcg_gen_ext32u_tl(t0
, t0
);
18460 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
18461 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
18463 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
18469 int acc
= extract32(ctx
->opcode
, 14, 2);
18470 TCGv_i64 t2
= tcg_temp_new_i64();
18471 TCGv_i64 t3
= tcg_temp_new_i64();
18473 gen_load_gpr(t0
, rt
);
18474 gen_load_gpr(t1
, rs
);
18475 tcg_gen_ext_tl_i64(t2
, t0
);
18476 tcg_gen_ext_tl_i64(t3
, t1
);
18477 tcg_gen_mul_i64(t2
, t2
, t3
);
18478 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18479 tcg_gen_add_i64(t2
, t2
, t3
);
18480 tcg_temp_free_i64(t3
);
18481 gen_move_low32(cpu_LO
[acc
], t2
);
18482 gen_move_high32(cpu_HI
[acc
], t2
);
18483 tcg_temp_free_i64(t2
);
18489 int acc
= extract32(ctx
->opcode
, 14, 2);
18490 TCGv_i32 t2
= tcg_temp_new_i32();
18491 TCGv_i32 t3
= tcg_temp_new_i32();
18493 gen_load_gpr(t0
, rs
);
18494 gen_load_gpr(t1
, rt
);
18495 tcg_gen_trunc_tl_i32(t2
, t0
);
18496 tcg_gen_trunc_tl_i32(t3
, t1
);
18497 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
18498 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18499 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18500 tcg_temp_free_i32(t2
);
18501 tcg_temp_free_i32(t3
);
18506 gen_load_gpr(v1_t
, rs
);
18507 tcg_gen_movi_tl(t0
, rd
>> 3);
18508 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
18509 gen_store_gpr(t0
, ret
);
18513 case NM_POOL32AXF_2_8_15
:
18514 switch (extract32(ctx
->opcode
, 9, 3)) {
18516 case NM_DPAQ_SA_L_W
:
18518 case NM_DPSQ_SA_L_W
:
18519 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18524 int acc
= extract32(ctx
->opcode
, 14, 2);
18525 TCGv_i64 t2
= tcg_temp_new_i64();
18526 TCGv_i64 t3
= tcg_temp_new_i64();
18528 gen_load_gpr(t0
, rs
);
18529 gen_load_gpr(t1
, rt
);
18530 tcg_gen_ext32u_tl(t0
, t0
);
18531 tcg_gen_ext32u_tl(t1
, t1
);
18532 tcg_gen_extu_tl_i64(t2
, t0
);
18533 tcg_gen_extu_tl_i64(t3
, t1
);
18534 tcg_gen_mul_i64(t2
, t2
, t3
);
18535 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18536 tcg_gen_add_i64(t2
, t2
, t3
);
18537 tcg_temp_free_i64(t3
);
18538 gen_move_low32(cpu_LO
[acc
], t2
);
18539 gen_move_high32(cpu_HI
[acc
], t2
);
18540 tcg_temp_free_i64(t2
);
18546 int acc
= extract32(ctx
->opcode
, 14, 2);
18547 TCGv_i32 t2
= tcg_temp_new_i32();
18548 TCGv_i32 t3
= tcg_temp_new_i32();
18550 gen_load_gpr(t0
, rs
);
18551 gen_load_gpr(t1
, rt
);
18552 tcg_gen_trunc_tl_i32(t2
, t0
);
18553 tcg_gen_trunc_tl_i32(t3
, t1
);
18554 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
18555 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
18556 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
18557 tcg_temp_free_i32(t2
);
18558 tcg_temp_free_i32(t3
);
18563 tcg_gen_movi_tl(t0
, rd
>> 3);
18564 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
18565 gen_store_gpr(t0
, ret
);
18568 generate_exception_end(ctx
, EXCP_RI
);
18572 case NM_POOL32AXF_2_16_23
:
18573 switch (extract32(ctx
->opcode
, 9, 3)) {
18574 case NM_DPAU_H_QBL
:
18575 case NM_DPAQX_S_W_PH
:
18576 case NM_DPSU_H_QBL
:
18577 case NM_DPSQX_S_W_PH
:
18578 case NM_MULSA_W_PH
:
18579 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18583 tcg_gen_movi_tl(t0
, rd
>> 3);
18584 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
18585 gen_store_gpr(t0
, ret
);
18590 int acc
= extract32(ctx
->opcode
, 14, 2);
18591 TCGv_i64 t2
= tcg_temp_new_i64();
18592 TCGv_i64 t3
= tcg_temp_new_i64();
18594 gen_load_gpr(t0
, rs
);
18595 gen_load_gpr(t1
, rt
);
18596 tcg_gen_ext_tl_i64(t2
, t0
);
18597 tcg_gen_ext_tl_i64(t3
, t1
);
18598 tcg_gen_mul_i64(t2
, t2
, t3
);
18599 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18600 tcg_gen_sub_i64(t2
, t3
, t2
);
18601 tcg_temp_free_i64(t3
);
18602 gen_move_low32(cpu_LO
[acc
], t2
);
18603 gen_move_high32(cpu_HI
[acc
], t2
);
18604 tcg_temp_free_i64(t2
);
18607 case NM_EXTRV_RS_W
:
18609 tcg_gen_movi_tl(t0
, rd
>> 3);
18610 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
18611 gen_store_gpr(t0
, ret
);
18615 case NM_POOL32AXF_2_24_31
:
18616 switch (extract32(ctx
->opcode
, 9, 3)) {
18617 case NM_DPAU_H_QBR
:
18618 case NM_DPAQX_SA_W_PH
:
18619 case NM_DPSU_H_QBR
:
18620 case NM_DPSQX_SA_W_PH
:
18621 case NM_MULSAQ_S_W_PH
:
18622 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
18626 tcg_gen_movi_tl(t0
, rd
>> 3);
18627 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
18628 gen_store_gpr(t0
, ret
);
18633 int acc
= extract32(ctx
->opcode
, 14, 2);
18634 TCGv_i64 t2
= tcg_temp_new_i64();
18635 TCGv_i64 t3
= tcg_temp_new_i64();
18637 gen_load_gpr(t0
, rs
);
18638 gen_load_gpr(t1
, rt
);
18639 tcg_gen_ext32u_tl(t0
, t0
);
18640 tcg_gen_ext32u_tl(t1
, t1
);
18641 tcg_gen_extu_tl_i64(t2
, t0
);
18642 tcg_gen_extu_tl_i64(t3
, t1
);
18643 tcg_gen_mul_i64(t2
, t2
, t3
);
18644 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
18645 tcg_gen_sub_i64(t2
, t3
, t2
);
18646 tcg_temp_free_i64(t3
);
18647 gen_move_low32(cpu_LO
[acc
], t2
);
18648 gen_move_high32(cpu_HI
[acc
], t2
);
18649 tcg_temp_free_i64(t2
);
18654 tcg_gen_movi_tl(t0
, rd
>> 3);
18655 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
18656 gen_store_gpr(t0
, ret
);
18661 generate_exception_end(ctx
, EXCP_RI
);
18668 tcg_temp_free(v0_t
);
18669 tcg_temp_free(v1_t
);
18672 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18676 TCGv t0
= tcg_temp_new();
18677 TCGv v0_t
= tcg_temp_new();
18679 gen_load_gpr(v0_t
, rs
);
18684 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
18685 gen_store_gpr(v0_t
, ret
);
18689 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
18690 gen_store_gpr(v0_t
, ret
);
18694 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
18695 gen_store_gpr(v0_t
, ret
);
18697 case NM_PRECEQ_W_PHL
:
18699 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
18700 tcg_gen_ext32s_tl(v0_t
, v0_t
);
18701 gen_store_gpr(v0_t
, ret
);
18703 case NM_PRECEQ_W_PHR
:
18705 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
18706 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
18707 tcg_gen_ext32s_tl(v0_t
, v0_t
);
18708 gen_store_gpr(v0_t
, ret
);
18710 case NM_PRECEQU_PH_QBL
:
18712 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
18713 gen_store_gpr(v0_t
, ret
);
18715 case NM_PRECEQU_PH_QBR
:
18717 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
18718 gen_store_gpr(v0_t
, ret
);
18720 case NM_PRECEQU_PH_QBLA
:
18722 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
18723 gen_store_gpr(v0_t
, ret
);
18725 case NM_PRECEQU_PH_QBRA
:
18727 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
18728 gen_store_gpr(v0_t
, ret
);
18730 case NM_PRECEU_PH_QBL
:
18732 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
18733 gen_store_gpr(v0_t
, ret
);
18735 case NM_PRECEU_PH_QBR
:
18737 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
18738 gen_store_gpr(v0_t
, ret
);
18740 case NM_PRECEU_PH_QBLA
:
18742 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
18743 gen_store_gpr(v0_t
, ret
);
18745 case NM_PRECEU_PH_QBRA
:
18747 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
18748 gen_store_gpr(v0_t
, ret
);
18752 tcg_gen_ext16u_tl(v0_t
, v0_t
);
18753 tcg_gen_shli_tl(t0
, v0_t
, 16);
18754 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
18755 tcg_gen_ext32s_tl(v0_t
, v0_t
);
18756 gen_store_gpr(v0_t
, ret
);
18760 tcg_gen_ext8u_tl(v0_t
, v0_t
);
18761 tcg_gen_shli_tl(t0
, v0_t
, 8);
18762 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
18763 tcg_gen_shli_tl(t0
, v0_t
, 16);
18764 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
18765 tcg_gen_ext32s_tl(v0_t
, v0_t
);
18766 gen_store_gpr(v0_t
, ret
);
18770 gen_helper_bitrev(v0_t
, v0_t
);
18771 gen_store_gpr(v0_t
, ret
);
18776 TCGv tv0
= tcg_temp_new();
18778 gen_load_gpr(tv0
, rt
);
18779 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
18780 gen_store_gpr(v0_t
, ret
);
18781 tcg_temp_free(tv0
);
18784 case NM_RADDU_W_QB
:
18786 gen_helper_raddu_w_qb(v0_t
, v0_t
);
18787 gen_store_gpr(v0_t
, ret
);
18790 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
18794 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
18798 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
18801 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
18804 generate_exception_end(ctx
, EXCP_RI
);
18808 tcg_temp_free(v0_t
);
18812 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18813 int rt
, int rs
, int rd
)
/*
 * Emit TCG code for one instruction of the nanoMIPS P32AXF minor-opcode
 * group 7 (DSP shift-by-immediate forms and byte replication).
 *
 * NOTE(review): this extract is missing interior source lines (the
 * embedded original line numbers jump, e.g. 18824 -> 18827), so the
 * switch's case labels, break statements and closing braces are not
 * visible here.  Comments below describe only the visible statements.
 */
18815 TCGv t0
= tcg_temp_new();
/* rs_t caches GPR[rs]; presumably the variable shift operand - confirm
 * against the gen_helper_* signatures, which are not visible here. */
18816 TCGv rs_t
= tcg_temp_new();
18818 gen_load_gpr(rs_t
, rs
);
/* t0 <- rd >> 2: immediate packed into the upper bits of the rd field. */
18823 tcg_gen_movi_tl(t0
, rd
>> 2);
/* Opcode bit 12 selects between the plain and the rounding variant. */
18824 switch (extract32(ctx
->opcode
, 12, 1)) {
/* SHRA.QB path: per-byte arithmetic right shift, result stored to rt. */
18827 gen_helper_shra_qb(t0
, t0
, rs_t
);
18828 gen_store_gpr(t0
, rt
);
/* SHRA_R.QB path: same shift with rounding. */
18832 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
18833 gen_store_gpr(t0
, rt
);
/* t0 <- rd >> 1: per-halfword shift amount for SHRL.PH. */
18839 tcg_gen_movi_tl(t0
, rd
>> 1);
18840 gen_helper_shrl_ph(t0
, t0
, rs_t
);
18841 gen_store_gpr(t0
, rt
);
/*
 * Byte-replication path (REPL.QB-style): build a 32-bit value with the
 * 8-bit immediate copied into each byte lane.
 * NOTE(review): the final "| imm" term (original line 18852) is among
 * the lines missing from this extract.
 */
18847 target_long result
;
18848 imm
= extract32(ctx
->opcode
, 13, 8);
18849 result
= (uint32_t)imm
<< 24 |
18850 (uint32_t)imm
<< 16 |
18851 (uint32_t)imm
<< 8 |
/* Sign-extend the 32-bit pattern into target_long for 64-bit targets. */
18853 result
= (int32_t)result
;
18854 tcg_gen_movi_tl(t0
, result
);
18855 gen_store_gpr(t0
, rt
);
/* Unrecognised minor opcode: raise a Reserved Instruction exception. */
18859 generate_exception_end(ctx
, EXCP_RI
);
/* Release temporaries.  t0's matching tcg_temp_free (presumably original
 * line 18862) is not visible in this extract - confirm it exists. */
18863 tcg_temp_free(rs_t
);
18867 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18869 int rt
= extract32(ctx
->opcode
, 21, 5);
18870 int rs
= extract32(ctx
->opcode
, 16, 5);
18871 int rd
= extract32(ctx
->opcode
, 11, 5);
18873 switch (extract32(ctx
->opcode
, 6, 3)) {
18874 case NM_POOL32AXF_1
:
18876 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
18877 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18880 case NM_POOL32AXF_2
:
18882 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
18883 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18886 case NM_POOL32AXF_4
:
18888 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
18889 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
18892 case NM_POOL32AXF_5
:
18893 switch (extract32(ctx
->opcode
, 9, 7)) {
18894 #ifndef CONFIG_USER_ONLY
18896 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
18899 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
18902 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
18905 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
18908 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
18911 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
18914 check_cp0_enabled(ctx
);
18916 TCGv t0
= tcg_temp_new();
18918 save_cpu_state(ctx
, 1);
18919 gen_helper_di(t0
, cpu_env
);
18920 gen_store_gpr(t0
, rt
);
18921 /* Stop translation as we may have switched the execution mode */
18922 ctx
->base
.is_jmp
= DISAS_STOP
;
18927 check_cp0_enabled(ctx
);
18929 TCGv t0
= tcg_temp_new();
18931 save_cpu_state(ctx
, 1);
18932 gen_helper_ei(t0
, cpu_env
);
18933 gen_store_gpr(t0
, rt
);
18934 /* Stop translation as we may have switched the execution mode */
18935 ctx
->base
.is_jmp
= DISAS_STOP
;
18940 gen_load_srsgpr(rs
, rt
);
18943 gen_store_srsgpr(rs
, rt
);
18946 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
18949 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
18952 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
18956 generate_exception_end(ctx
, EXCP_RI
);
18960 case NM_POOL32AXF_7
:
18962 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
18963 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
18967 generate_exception_end(ctx
, EXCP_RI
);
18972 /* Immediate Value Compact Branches */
18973 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
18974 int rt
, int32_t imm
, int32_t offset
)
18977 int bcond_compute
= 0;
18978 TCGv t0
= tcg_temp_new();
18979 TCGv t1
= tcg_temp_new();
18981 gen_load_gpr(t0
, rt
);
18982 tcg_gen_movi_tl(t1
, imm
);
18983 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18985 /* Load needed operands and calculate btarget */
18988 if (rt
== 0 && imm
== 0) {
18989 /* Unconditional branch */
18990 } else if (rt
== 0 && imm
!= 0) {
18995 cond
= TCG_COND_EQ
;
19001 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19002 generate_exception_end(ctx
, EXCP_RI
);
19004 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19005 /* Unconditional branch */
19006 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19010 tcg_gen_shri_tl(t0
, t0
, imm
);
19011 tcg_gen_andi_tl(t0
, t0
, 1);
19012 tcg_gen_movi_tl(t1
, 0);
19014 if (opc
== NM_BBEQZC
) {
19015 cond
= TCG_COND_EQ
;
19017 cond
= TCG_COND_NE
;
19022 if (rt
== 0 && imm
== 0) {
19025 } else if (rt
== 0 && imm
!= 0) {
19026 /* Unconditional branch */
19029 cond
= TCG_COND_NE
;
19033 if (rt
== 0 && imm
== 0) {
19034 /* Unconditional branch */
19037 cond
= TCG_COND_GE
;
19042 cond
= TCG_COND_LT
;
19045 if (rt
== 0 && imm
== 0) {
19046 /* Unconditional branch */
19049 cond
= TCG_COND_GEU
;
19054 cond
= TCG_COND_LTU
;
19057 MIPS_INVAL("Immediate Value Compact branch");
19058 generate_exception_end(ctx
, EXCP_RI
);
19062 if (bcond_compute
== 0) {
19063 /* Uncoditional compact branch */
19064 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19066 /* Conditional compact branch */
19067 TCGLabel
*fs
= gen_new_label();
19069 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19071 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19074 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19082 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
/*
 * Emit an unconditional register-indirect branch with scaled offset.
 * The branch target is GPR[rs] * 2 added to the address of the next
 * instruction; for the linking form, rt receives the return address.
 *
 * NOTE(review): interior lines are missing from this extract (original
 * numbering jumps 19083 -> 19086 and 19104 -> 19110), so the signature
 * tail, the rt != 0 guard around the link write, and the trailing
 * temp frees are not visible.  Comments describe visible code only.
 */
19083 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19086 TCGv t0
= tcg_temp_new();
19087 TCGv t1
= tcg_temp_new();
/* t0 <- GPR[rs]: the (halfword-scaled) branch offset source. */
19090 gen_load_gpr(t0
, rs
);
/* Link: rt <- address of the next instruction (pc_next + 4). */
19094 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19097 /* calculate btarget */
/* Scale the register offset by 2 (instructions are 16-bit aligned). */
19098 tcg_gen_shli_tl(t0
, t0
, 1);
19099 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
/* btarget <- (pc_next + 4) + (GPR[rs] << 1), with address wrapping
 * handled by gen_op_addr_add for the current addressing mode. */
19100 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19102 /* unconditional branch to register */
19103 tcg_gen_mov_tl(cpu_PC
, btarget
);
/* Chain to the target TB via the TB-lookup helper (no direct jump,
 * since the target is computed at run time). */
19104 tcg_gen_lookup_and_goto_ptr();
19110 /* nanoMIPS Branches */
19111 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19112 int rs
, int rt
, int32_t offset
)
19114 int bcond_compute
= 0;
19115 TCGv t0
= tcg_temp_new();
19116 TCGv t1
= tcg_temp_new();
19118 /* Load needed operands and calculate btarget */
19120 /* compact branch */
19123 gen_load_gpr(t0
, rs
);
19124 gen_load_gpr(t1
, rt
);
19126 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19130 if (rs
== 0 || rs
== rt
) {
19131 /* OPC_BLEZALC, OPC_BGEZALC */
19132 /* OPC_BGTZALC, OPC_BLTZALC */
19133 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19135 gen_load_gpr(t0
, rs
);
19136 gen_load_gpr(t1
, rt
);
19138 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19141 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19145 /* OPC_BEQZC, OPC_BNEZC */
19146 gen_load_gpr(t0
, rs
);
19148 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19150 /* OPC_JIC, OPC_JIALC */
19151 TCGv tbase
= tcg_temp_new();
19152 TCGv toffset
= tcg_temp_new();
19154 gen_load_gpr(tbase
, rt
);
19155 tcg_gen_movi_tl(toffset
, offset
);
19156 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19157 tcg_temp_free(tbase
);
19158 tcg_temp_free(toffset
);
19162 MIPS_INVAL("Compact branch/jump");
19163 generate_exception_end(ctx
, EXCP_RI
);
19167 if (bcond_compute
== 0) {
19168 /* Uncoditional compact branch */
19171 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19174 MIPS_INVAL("Compact branch/jump");
19175 generate_exception_end(ctx
, EXCP_RI
);
19179 /* Conditional compact branch */
19180 TCGLabel
*fs
= gen_new_label();
19184 if (rs
== 0 && rt
!= 0) {
19186 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19187 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19189 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19192 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19196 if (rs
== 0 && rt
!= 0) {
19198 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19199 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19201 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19204 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19208 if (rs
== 0 && rt
!= 0) {
19210 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19211 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19213 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19216 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19220 if (rs
== 0 && rt
!= 0) {
19222 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19223 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19225 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19228 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19232 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19235 MIPS_INVAL("Compact conditional branch/jump");
19236 generate_exception_end(ctx
, EXCP_RI
);
19240 /* Generating branch here as compact branches don't have delay slot */
19241 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19244 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19253 /* nanoMIPS CP1 Branches */
/*
 * Emit a coprocessor-1 (FPU) conditional branch.  The branch condition
 * is bit 0 of the 64-bit FPR ft (the result format used by the R6-style
 * CMP.condn instructions); the decision is latched into the global
 * "bcond" and the target into ctx->btarget for the delay-handling code.
 *
 * NOTE(review): the case labels of the switch (presumably the BC1EQZC /
 * BC1NEZC opcodes) and several other interior lines are missing from
 * this extract (numbering jumps 19263 -> 19267, 19276 -> 19280).
 */
19254 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19255 int32_t ft
, int32_t offset
)
19257 target_ulong btarget
;
19258 TCGv_i64 t0
= tcg_temp_new_i64();
/* t0 <- FPR[ft], then isolate bit 0: the boolean comparison result. */
19260 gen_load_fpr64(ctx
, t0
, ft
)
;
19261 tcg_gen_andi_i64(t0
, t0
, 1);
/* Branch target = address of next instruction + signed offset. */
19263 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* Branch-if-zero form: invert the bit so bcond != 0 means "taken". */
19267 tcg_gen_xori_i64(t0
, t0
, 1);
/* Mark a conditional branch pending for the translation loop. */
19268 ctx
->hflags
|= MIPS_HFLAG_BC
;
19271 /* t0 already set */
/* Branch-if-nonzero form: bit 0 is already the taken condition. */
19272 ctx
->hflags
|= MIPS_HFLAG_BC
;
/* Unrecognised CP1 branch opcode: Reserved Instruction. */
19275 MIPS_INVAL("cp1 cond branch");
19276 generate_exception_end(ctx
, EXCP_RI
);
/* Publish the condition and target for the branch-resolution code. */
19280 tcg_gen_trunc_i64_tl(bcond
, t0
);
19282 ctx
->btarget
= btarget
;
19285 tcg_temp_free_i64(t0
);
19289 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19292 t0
= tcg_temp_new();
19293 t1
= tcg_temp_new();
19295 gen_load_gpr(t0
, rs
);
19296 gen_load_gpr(t1
, rt
);
19298 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19299 /* PP.LSXS instructions require shifting */
19300 switch (extract32(ctx
->opcode
, 7, 4)) {
19305 tcg_gen_shli_tl(t0
, t0
, 1);
19312 tcg_gen_shli_tl(t0
, t0
, 2);
19316 tcg_gen_shli_tl(t0
, t0
, 3);
19320 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19322 switch (extract32(ctx
->opcode
, 7, 4)) {
19324 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19326 gen_store_gpr(t0
, rd
);
19330 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19332 gen_store_gpr(t0
, rd
);
19336 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19338 gen_store_gpr(t0
, rd
);
19341 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19343 gen_store_gpr(t0
, rd
);
19347 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19349 gen_store_gpr(t0
, rd
);
19353 gen_load_gpr(t1
, rd
);
19354 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19360 gen_load_gpr(t1
, rd
);
19361 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19367 gen_load_gpr(t1
, rd
);
19368 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
19372 /*case NM_LWC1XS:*/
19374 /*case NM_LDC1XS:*/
19376 /*case NM_SWC1XS:*/
19378 /*case NM_SDC1XS:*/
19379 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19380 check_cp1_enabled(ctx
);
19381 switch (extract32(ctx
->opcode
, 7, 4)) {
19383 /*case NM_LWC1XS:*/
19384 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
19387 /*case NM_LDC1XS:*/
19388 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
19391 /*case NM_SWC1XS:*/
19392 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
19395 /*case NM_SDC1XS:*/
19396 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
19400 generate_exception_err(ctx
, EXCP_CpU
, 1);
19404 generate_exception_end(ctx
, EXCP_RI
);
19412 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
19416 rt
= extract32(ctx
->opcode
, 21, 5);
19417 rs
= extract32(ctx
->opcode
, 16, 5);
19418 rd
= extract32(ctx
->opcode
, 11, 5);
19420 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
19421 generate_exception_end(ctx
, EXCP_RI
);
19424 check_cp1_enabled(ctx
);
19425 switch (extract32(ctx
->opcode
, 0, 3)) {
19427 switch (extract32(ctx
->opcode
, 3, 7)) {
19429 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
19432 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
19435 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
19438 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
19441 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
19444 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
19447 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
19450 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
19453 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
19456 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
19459 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
19462 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
19465 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
19468 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
19471 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
19474 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
19477 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
19480 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
19483 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
19486 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
19489 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
19492 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
19495 generate_exception_end(ctx
, EXCP_RI
);
19500 switch (extract32(ctx
->opcode
, 3, 3)) {
19502 switch (extract32(ctx
->opcode
, 9, 1)) {
19504 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
19507 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
19512 switch (extract32(ctx
->opcode
, 9, 1)) {
19514 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
19517 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
19522 switch (extract32(ctx
->opcode
, 9, 1)) {
19524 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
19527 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
19532 switch (extract32(ctx
->opcode
, 9, 1)) {
19534 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
19537 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
19542 switch (extract32(ctx
->opcode
, 6, 8)) {
19544 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
19547 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
19550 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
19553 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
19556 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
19559 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
19562 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
19565 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
19568 switch (extract32(ctx
->opcode
, 6, 9)) {
19570 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
19573 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
19576 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
19579 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
19582 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
19585 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
19588 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
19591 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
19594 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
19597 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
19600 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
19603 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
19606 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
19609 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
19612 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
19615 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
19618 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
19621 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
19624 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
19627 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
19630 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
19633 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
19636 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
19639 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
19642 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
19645 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
19648 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
19651 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
19654 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
19657 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
19660 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
19663 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
19666 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
19669 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
19672 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
19675 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
19678 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
19681 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
19684 generate_exception_end(ctx
, EXCP_RI
);
19693 switch (extract32(ctx
->opcode
, 3, 3)) {
19694 case NM_CMP_CONDN_S
:
19695 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
19697 case NM_CMP_CONDN_D
:
19698 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
19701 generate_exception_end(ctx
, EXCP_RI
);
19706 generate_exception_end(ctx
, EXCP_RI
);
19711 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
19712 int rd
, int rs
, int rt
)
19715 TCGv t0
= tcg_temp_new();
19716 TCGv v1_t
= tcg_temp_new();
19717 TCGv v2_t
= tcg_temp_new();
19719 gen_load_gpr(v1_t
, rs
);
19720 gen_load_gpr(v2_t
, rt
);
19725 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
19729 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
19733 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
19735 case NM_CMPU_EQ_QB
:
19737 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
19739 case NM_CMPU_LT_QB
:
19741 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
19743 case NM_CMPU_LE_QB
:
19745 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
19747 case NM_CMPGU_EQ_QB
:
19749 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
19750 gen_store_gpr(v1_t
, ret
);
19752 case NM_CMPGU_LT_QB
:
19754 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
19755 gen_store_gpr(v1_t
, ret
);
19757 case NM_CMPGU_LE_QB
:
19759 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
19760 gen_store_gpr(v1_t
, ret
);
19762 case NM_CMPGDU_EQ_QB
:
19764 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
19765 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
19766 gen_store_gpr(v1_t
, ret
);
19768 case NM_CMPGDU_LT_QB
:
19770 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
19771 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
19772 gen_store_gpr(v1_t
, ret
);
19774 case NM_CMPGDU_LE_QB
:
19776 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
19777 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
19778 gen_store_gpr(v1_t
, ret
);
19782 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
19783 gen_store_gpr(v1_t
, ret
);
19787 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19788 gen_store_gpr(v1_t
, ret
);
19792 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19793 gen_store_gpr(v1_t
, ret
);
19797 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19798 gen_store_gpr(v1_t
, ret
);
19802 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19803 gen_store_gpr(v1_t
, ret
);
19807 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
19808 gen_store_gpr(v1_t
, ret
);
19812 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
19813 gen_store_gpr(v1_t
, ret
);
19817 switch (extract32(ctx
->opcode
, 10, 1)) {
19820 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19821 gen_store_gpr(v1_t
, ret
);
19825 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19826 gen_store_gpr(v1_t
, ret
);
19830 case NM_ADDQH_R_PH
:
19832 switch (extract32(ctx
->opcode
, 10, 1)) {
19835 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
19836 gen_store_gpr(v1_t
, ret
);
19840 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
19841 gen_store_gpr(v1_t
, ret
);
19847 switch (extract32(ctx
->opcode
, 10, 1)) {
19850 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
19851 gen_store_gpr(v1_t
, ret
);
19855 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
19856 gen_store_gpr(v1_t
, ret
);
19862 switch (extract32(ctx
->opcode
, 10, 1)) {
19865 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19866 gen_store_gpr(v1_t
, ret
);
19870 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19871 gen_store_gpr(v1_t
, ret
);
19877 switch (extract32(ctx
->opcode
, 10, 1)) {
19880 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19881 gen_store_gpr(v1_t
, ret
);
19885 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19886 gen_store_gpr(v1_t
, ret
);
19890 case NM_ADDUH_R_QB
:
19892 switch (extract32(ctx
->opcode
, 10, 1)) {
19895 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
19896 gen_store_gpr(v1_t
, ret
);
19900 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
19901 gen_store_gpr(v1_t
, ret
);
19905 case NM_SHRAV_R_PH
:
19907 switch (extract32(ctx
->opcode
, 10, 1)) {
19910 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
19911 gen_store_gpr(v1_t
, ret
);
19915 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
19916 gen_store_gpr(v1_t
, ret
);
19920 case NM_SHRAV_R_QB
:
19922 switch (extract32(ctx
->opcode
, 10, 1)) {
19925 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
19926 gen_store_gpr(v1_t
, ret
);
19930 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
19931 gen_store_gpr(v1_t
, ret
);
19937 switch (extract32(ctx
->opcode
, 10, 1)) {
19940 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19941 gen_store_gpr(v1_t
, ret
);
19945 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19946 gen_store_gpr(v1_t
, ret
);
19950 case NM_SUBQH_R_PH
:
19952 switch (extract32(ctx
->opcode
, 10, 1)) {
19955 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
19956 gen_store_gpr(v1_t
, ret
);
19960 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
19961 gen_store_gpr(v1_t
, ret
);
19967 switch (extract32(ctx
->opcode
, 10, 1)) {
19970 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
19971 gen_store_gpr(v1_t
, ret
);
19975 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
19976 gen_store_gpr(v1_t
, ret
);
19982 switch (extract32(ctx
->opcode
, 10, 1)) {
19985 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19986 gen_store_gpr(v1_t
, ret
);
19990 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19991 gen_store_gpr(v1_t
, ret
);
19997 switch (extract32(ctx
->opcode
, 10, 1)) {
20000 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20001 gen_store_gpr(v1_t
, ret
);
20005 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20006 gen_store_gpr(v1_t
, ret
);
20010 case NM_SUBUH_R_QB
:
20012 switch (extract32(ctx
->opcode
, 10, 1)) {
20015 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20016 gen_store_gpr(v1_t
, ret
);
20020 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20021 gen_store_gpr(v1_t
, ret
);
20025 case NM_SHLLV_S_PH
:
20027 switch (extract32(ctx
->opcode
, 10, 1)) {
20030 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20031 gen_store_gpr(v1_t
, ret
);
20035 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20036 gen_store_gpr(v1_t
, ret
);
20040 case NM_PRECR_SRA_R_PH_W
:
20042 switch (extract32(ctx
->opcode
, 10, 1)) {
20044 /* PRECR_SRA_PH_W */
20046 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20047 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20049 gen_store_gpr(v1_t
, rt
);
20050 tcg_temp_free_i32(sa_t
);
20054 /* PRECR_SRA_R_PH_W */
20056 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20057 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20059 gen_store_gpr(v1_t
, rt
);
20060 tcg_temp_free_i32(sa_t
);
20065 case NM_MULEU_S_PH_QBL
:
20067 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20068 gen_store_gpr(v1_t
, ret
);
20070 case NM_MULEU_S_PH_QBR
:
20072 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20073 gen_store_gpr(v1_t
, ret
);
20075 case NM_MULQ_RS_PH
:
20077 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20078 gen_store_gpr(v1_t
, ret
);
20082 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20083 gen_store_gpr(v1_t
, ret
);
20087 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20088 gen_store_gpr(v1_t
, ret
);
20092 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20093 gen_store_gpr(v1_t
, ret
);
20097 gen_load_gpr(t0
, rs
);
20099 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20101 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20105 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20106 gen_store_gpr(v1_t
, ret
);
20110 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20111 gen_store_gpr(v1_t
, ret
);
20115 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20116 gen_store_gpr(v1_t
, ret
);
20120 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20121 gen_store_gpr(v1_t
, ret
);
20125 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20126 gen_store_gpr(v1_t
, ret
);
20130 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20131 gen_store_gpr(v1_t
, ret
);
20136 TCGv tv0
= tcg_temp_new();
20137 TCGv tv1
= tcg_temp_new();
20138 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20140 tcg_gen_movi_tl(tv0
, rd
>> 3);
20141 tcg_gen_movi_tl(tv1
, imm
);
20142 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20145 case NM_MULEQ_S_W_PHL
:
20147 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20148 gen_store_gpr(v1_t
, ret
);
20150 case NM_MULEQ_S_W_PHR
:
20152 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20153 gen_store_gpr(v1_t
, ret
);
20157 switch (extract32(ctx
->opcode
, 10, 1)) {
20160 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20161 gen_store_gpr(v1_t
, ret
);
20165 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20166 gen_store_gpr(v1_t
, ret
);
20170 case NM_PRECR_QB_PH
:
20172 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20173 gen_store_gpr(v1_t
, ret
);
20175 case NM_PRECRQ_QB_PH
:
20177 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20178 gen_store_gpr(v1_t
, ret
);
20180 case NM_PRECRQ_PH_W
:
20182 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20183 gen_store_gpr(v1_t
, ret
);
20185 case NM_PRECRQ_RS_PH_W
:
20187 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20188 gen_store_gpr(v1_t
, ret
);
20190 case NM_PRECRQU_S_QB_PH
:
20192 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20193 gen_store_gpr(v1_t
, ret
);
20197 tcg_gen_movi_tl(t0
, rd
);
20198 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20199 gen_store_gpr(v1_t
, rt
);
20203 tcg_gen_movi_tl(t0
, rd
>> 1);
20204 switch (extract32(ctx
->opcode
, 10, 1)) {
20207 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20208 gen_store_gpr(v1_t
, rt
);
20212 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20213 gen_store_gpr(v1_t
, rt
);
20219 tcg_gen_movi_tl(t0
, rd
>> 1);
20220 switch (extract32(ctx
->opcode
, 10, 2)) {
20223 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20224 gen_store_gpr(v1_t
, rt
);
20228 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20229 gen_store_gpr(v1_t
, rt
);
20232 generate_exception_end(ctx
, EXCP_RI
);
20238 tcg_gen_movi_tl(t0
, rd
);
20239 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20240 gen_store_gpr(v1_t
, rt
);
20246 imm
= sextract32(ctx
->opcode
, 11, 11);
20247 imm
= (int16_t)(imm
<< 6) >> 6;
20249 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20254 generate_exception_end(ctx
, EXCP_RI
);
20259 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20267 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20268 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20270 rt
= extract32(ctx
->opcode
, 21, 5);
20271 rs
= extract32(ctx
->opcode
, 16, 5);
20272 rd
= extract32(ctx
->opcode
, 11, 5);
20274 op
= extract32(ctx
->opcode
, 26, 6);
20279 switch (extract32(ctx
->opcode
, 19, 2)) {
20282 generate_exception_end(ctx
, EXCP_RI
);
20285 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20286 generate_exception_end(ctx
, EXCP_SYSCALL
);
20288 generate_exception_end(ctx
, EXCP_RI
);
20292 generate_exception_end(ctx
, EXCP_BREAK
);
20295 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20296 gen_helper_do_semihosting(cpu_env
);
20298 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20299 generate_exception_end(ctx
, EXCP_RI
);
20301 generate_exception_end(ctx
, EXCP_DBp
);
20308 imm
= extract32(ctx
->opcode
, 0, 16);
20310 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20312 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20314 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20319 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20320 extract32(ctx
->opcode
, 1, 20) << 1;
20321 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20322 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20326 switch (ctx
->opcode
& 0x07) {
20328 gen_pool32a0_nanomips_insn(env
, ctx
);
20332 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20333 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20337 switch (extract32(ctx
->opcode
, 3, 3)) {
20339 gen_p_lsx(ctx
, rd
, rs
, rt
);
20342 /* In nanoMIPS, the shift field directly encodes the shift
20343 * amount, meaning that the supported shift values are in
20344 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20345 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20346 extract32(ctx
->opcode
, 9, 2) - 1);
20349 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20352 gen_pool32axf_nanomips_insn(env
, ctx
);
20355 generate_exception_end(ctx
, EXCP_RI
);
20360 generate_exception_end(ctx
, EXCP_RI
);
20365 switch (ctx
->opcode
& 0x03) {
20368 offset
= extract32(ctx
->opcode
, 0, 21);
20369 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
20373 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20376 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
20379 generate_exception_end(ctx
, EXCP_RI
);
20385 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
20386 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
20387 switch (extract32(ctx
->opcode
, 16, 5)) {
20391 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
20397 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
20398 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20404 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
20410 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20413 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20420 t0
= tcg_temp_new();
20422 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20425 tcg_gen_movi_tl(t0
, addr
);
20426 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
20434 t0
= tcg_temp_new();
20435 t1
= tcg_temp_new();
20437 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
20440 tcg_gen_movi_tl(t0
, addr
);
20441 gen_load_gpr(t1
, rt
);
20443 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
20450 generate_exception_end(ctx
, EXCP_RI
);
20456 switch (extract32(ctx
->opcode
, 12, 4)) {
20458 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20461 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20464 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20467 switch (extract32(ctx
->opcode
, 20, 1)) {
20469 switch (ctx
->opcode
& 3) {
20471 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20472 extract32(ctx
->opcode
, 2, 1),
20473 extract32(ctx
->opcode
, 3, 9) << 3);
20476 case NM_RESTORE_JRC
:
20477 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
20478 extract32(ctx
->opcode
, 2, 1),
20479 extract32(ctx
->opcode
, 3, 9) << 3);
20480 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
20481 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20485 generate_exception_end(ctx
, EXCP_RI
);
20490 generate_exception_end(ctx
, EXCP_RI
);
20495 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20498 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
20502 TCGv t0
= tcg_temp_new();
20504 imm
= extract32(ctx
->opcode
, 0, 12);
20505 gen_load_gpr(t0
, rs
);
20506 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
20507 gen_store_gpr(t0
, rt
);
20513 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
20514 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
20518 int shift
= extract32(ctx
->opcode
, 0, 5);
20519 switch (extract32(ctx
->opcode
, 5, 4)) {
20521 if (rt
== 0 && shift
== 0) {
20523 } else if (rt
== 0 && shift
== 3) {
20524 /* EHB - treat as NOP */
20525 } else if (rt
== 0 && shift
== 5) {
20526 /* PAUSE - treat as NOP */
20527 } else if (rt
== 0 && shift
== 6) {
20529 gen_sync(extract32(ctx
->opcode
, 16, 5));
20532 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
20533 extract32(ctx
->opcode
, 0, 5));
20537 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
20538 extract32(ctx
->opcode
, 0, 5));
20541 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
20542 extract32(ctx
->opcode
, 0, 5));
20545 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
20546 extract32(ctx
->opcode
, 0, 5));
20554 TCGv t0
= tcg_temp_new();
20555 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
20556 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
20558 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
20560 gen_load_gpr(t0
, rs
);
20561 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
20564 tcg_temp_free_i32(shift
);
20565 tcg_temp_free_i32(shiftx
);
20566 tcg_temp_free_i32(stripe
);
20570 switch (((ctx
->opcode
>> 10) & 2) |
20571 (extract32(ctx
->opcode
, 5, 1))) {
20574 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20575 extract32(ctx
->opcode
, 6, 5));
20578 generate_exception_end(ctx
, EXCP_RI
);
20583 switch (((ctx
->opcode
>> 10) & 2) |
20584 (extract32(ctx
->opcode
, 5, 1))) {
20587 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
20588 extract32(ctx
->opcode
, 6, 5));
20591 generate_exception_end(ctx
, EXCP_RI
);
20596 generate_exception_end(ctx
, EXCP_RI
);
20601 gen_pool32f_nanomips_insn(ctx
);
20606 switch (extract32(ctx
->opcode
, 1, 1)) {
20609 tcg_gen_movi_tl(cpu_gpr
[rt
],
20610 sextract32(ctx
->opcode
, 0, 1) << 31 |
20611 extract32(ctx
->opcode
, 2, 10) << 21 |
20612 extract32(ctx
->opcode
, 12, 9) << 12);
20617 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
20618 extract32(ctx
->opcode
, 2, 10) << 21 |
20619 extract32(ctx
->opcode
, 12, 9) << 12;
20621 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20622 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20629 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
20631 switch (extract32(ctx
->opcode
, 18, 3)) {
20633 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
20636 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
20639 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
20643 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
20648 switch (ctx
->opcode
& 1) {
20650 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
20653 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
20659 switch (ctx
->opcode
& 1) {
20661 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
20664 generate_exception_end(ctx
, EXCP_RI
);
20670 switch (ctx
->opcode
& 0x3) {
20672 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
20675 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
20678 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
20681 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
20686 generate_exception_end(ctx
, EXCP_RI
);
20693 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
20695 switch (extract32(ctx
->opcode
, 12, 4)) {
20699 /* Break the TB to be able to sync copied instructions
20701 ctx
->base
.is_jmp
= DISAS_STOP
;
20704 /* Treat as NOP. */
20708 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
20711 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
20714 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
20717 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
20720 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
20723 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
20726 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
20729 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
20732 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
20735 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
20738 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
20741 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
20744 generate_exception_end(ctx
, EXCP_RI
);
20751 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
20752 extract32(ctx
->opcode
, 0, 8);
20754 switch (extract32(ctx
->opcode
, 8, 3)) {
20756 switch (extract32(ctx
->opcode
, 11, 4)) {
20758 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
20761 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
20764 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
20767 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
20770 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
20773 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
20776 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
20779 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
20782 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
20785 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
20788 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
20791 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
20796 /* Break the TB to be able to sync copied instructions
20798 ctx
->base
.is_jmp
= DISAS_STOP
;
20801 /* Treat as NOP. */
20805 generate_exception_end(ctx
, EXCP_RI
);
20810 switch (extract32(ctx
->opcode
, 11, 4)) {
20815 TCGv t0
= tcg_temp_new();
20816 TCGv t1
= tcg_temp_new();
20818 gen_base_offset_addr(ctx
, t0
, rs
, s
);
20820 switch (extract32(ctx
->opcode
, 11, 4)) {
20822 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
20824 gen_store_gpr(t0
, rt
);
20827 gen_load_gpr(t1
, rt
);
20828 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
20837 switch (ctx
->opcode
& 0x03) {
20839 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
20843 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
20848 switch (ctx
->opcode
& 0x03) {
20850 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
20854 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
20859 check_cp0_enabled(ctx
);
20860 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
20861 gen_cache_operation(ctx
, rt
, rs
, s
);
20870 int count
= extract32(ctx
->opcode
, 12, 3);
20873 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
20874 extract32(ctx
->opcode
, 0, 8);
20875 TCGv va
= tcg_temp_new();
20876 TCGv t1
= tcg_temp_new();
20877 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
20878 NM_P_LS_UAWM
? MO_UNALN
: 0;
20880 count
= (count
== 0) ? 8 : count
;
20881 while (counter
!= count
) {
20882 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
20883 int this_offset
= offset
+ (counter
<< 2);
20885 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
20887 switch (extract32(ctx
->opcode
, 11, 1)) {
20889 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
20891 gen_store_gpr(t1
, this_rt
);
20892 if ((this_rt
== rs
) &&
20893 (counter
!= (count
- 1))) {
20894 /* UNPREDICTABLE */
20898 this_rt
= (rt
== 0) ? 0 : this_rt
;
20899 gen_load_gpr(t1
, this_rt
);
20900 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
20911 generate_exception_end(ctx
, EXCP_RI
);
20919 TCGv t0
= tcg_temp_new();
20920 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20921 extract32(ctx
->opcode
, 1, 20) << 1;
20922 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
20923 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
20924 extract32(ctx
->opcode
, 21, 3));
20925 gen_load_gpr(t0
, rt
);
20926 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
20927 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
20933 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
20934 extract32(ctx
->opcode
, 1, 24) << 1;
20936 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
20938 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
20941 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
20946 switch (extract32(ctx
->opcode
, 12, 4)) {
20949 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
20952 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
20955 generate_exception_end(ctx
, EXCP_RI
);
20961 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
20962 extract32(ctx
->opcode
, 1, 13) << 1;
20963 switch (extract32(ctx
->opcode
, 14, 2)) {
20966 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
20969 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
20970 extract32(ctx
->opcode
, 1, 13) << 1;
20971 check_cp1_enabled(ctx
);
20972 switch (extract32(ctx
->opcode
, 16, 5)) {
20974 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
20977 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
20982 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
20983 extract32(ctx
->opcode
, 0, 1) << 13;
20985 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
20990 generate_exception_end(ctx
, EXCP_RI
);
20996 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
20998 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21002 if (rs
== rt
|| rt
== 0) {
21003 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21004 } else if (rs
== 0) {
21005 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21007 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21015 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21016 extract32(ctx
->opcode
, 1, 13) << 1;
21017 switch (extract32(ctx
->opcode
, 14, 2)) {
21020 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21023 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21025 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21027 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21031 if (rs
== 0 || rs
== rt
) {
21033 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21035 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21039 generate_exception_end(ctx
, EXCP_RI
);
21046 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21047 extract32(ctx
->opcode
, 1, 10) << 1;
21048 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21050 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21055 generate_exception_end(ctx
, EXCP_RI
);
21061 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21064 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21065 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21066 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21070 /* make sure instructions are on a halfword boundary */
21071 if (ctx
->base
.pc_next
& 0x1) {
21072 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21073 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21074 tcg_temp_free(tmp
);
21075 generate_exception_end(ctx
, EXCP_AdEL
);
21079 op
= extract32(ctx
->opcode
, 10, 6);
21082 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21085 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21086 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21089 switch (extract32(ctx
->opcode
, 3, 2)) {
21090 case NM_P16_SYSCALL
:
21091 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21092 generate_exception_end(ctx
, EXCP_SYSCALL
);
21094 generate_exception_end(ctx
, EXCP_RI
);
21098 generate_exception_end(ctx
, EXCP_BREAK
);
21101 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21102 gen_helper_do_semihosting(cpu_env
);
21104 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21105 generate_exception_end(ctx
, EXCP_RI
);
21107 generate_exception_end(ctx
, EXCP_DBp
);
21112 generate_exception_end(ctx
, EXCP_RI
);
21119 int shift
= extract32(ctx
->opcode
, 0, 3);
21121 shift
= (shift
== 0) ? 8 : shift
;
21123 switch (extract32(ctx
->opcode
, 3, 1)) {
21131 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21135 switch (ctx
->opcode
& 1) {
21137 gen_pool16c_nanomips_insn(ctx
);
21140 gen_ldxs(ctx
, rt
, rs
, rd
);
21145 switch (extract32(ctx
->opcode
, 6, 1)) {
21147 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21148 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21151 generate_exception_end(ctx
, EXCP_RI
);
21156 switch (extract32(ctx
->opcode
, 3, 1)) {
21158 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21159 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21161 case NM_P_ADDIURS5
:
21162 rt
= extract32(ctx
->opcode
, 5, 5);
21164 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21165 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21166 (extract32(ctx
->opcode
, 0, 3));
21167 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21173 switch (ctx
->opcode
& 0x1) {
21175 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21178 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21183 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21184 extract32(ctx
->opcode
, 5, 3);
21185 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21186 extract32(ctx
->opcode
, 0, 3);
21187 rt
= decode_gpr_gpr4(rt
);
21188 rs
= decode_gpr_gpr4(rs
);
21189 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21190 (extract32(ctx
->opcode
, 3, 1))) {
21193 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21197 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21200 generate_exception_end(ctx
, EXCP_RI
);
21206 int imm
= extract32(ctx
->opcode
, 0, 7);
21207 imm
= (imm
== 0x7f ? -1 : imm
);
21209 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21215 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21216 u
= (u
== 12) ? 0xff :
21217 (u
== 13) ? 0xffff : u
;
21218 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21222 offset
= extract32(ctx
->opcode
, 0, 2);
21223 switch (extract32(ctx
->opcode
, 2, 2)) {
21225 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21228 rt
= decode_gpr_gpr3_src_store(
21229 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21230 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21233 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21236 generate_exception_end(ctx
, EXCP_RI
);
21241 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21242 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21244 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21247 rt
= decode_gpr_gpr3_src_store(
21248 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21249 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21252 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21255 generate_exception_end(ctx
, EXCP_RI
);
21260 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21261 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21264 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21265 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21266 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
21270 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21271 extract32(ctx
->opcode
, 5, 3);
21272 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21273 extract32(ctx
->opcode
, 0, 3);
21274 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21275 (extract32(ctx
->opcode
, 8, 1) << 2);
21276 rt
= decode_gpr_gpr4(rt
);
21277 rs
= decode_gpr_gpr4(rs
);
21278 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
21282 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21283 extract32(ctx
->opcode
, 5, 3);
21284 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21285 extract32(ctx
->opcode
, 0, 3);
21286 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
21287 (extract32(ctx
->opcode
, 8, 1) << 2);
21288 rt
= decode_gpr_gpr4_zero(rt
);
21289 rs
= decode_gpr_gpr4(rs
);
21290 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21293 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21294 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
21297 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21298 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
21299 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
21302 rt
= decode_gpr_gpr3_src_store(
21303 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21304 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21305 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
21306 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
21309 rt
= decode_gpr_gpr3_src_store(
21310 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21311 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
21312 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
21315 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
21316 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21317 (extract32(ctx
->opcode
, 1, 9) << 1));
21320 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
21321 (sextract32(ctx
->opcode
, 0, 1) << 10) |
21322 (extract32(ctx
->opcode
, 1, 9) << 1));
21325 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
21326 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21327 (extract32(ctx
->opcode
, 1, 6) << 1));
21330 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
21331 (sextract32(ctx
->opcode
, 0, 1) << 7) |
21332 (extract32(ctx
->opcode
, 1, 6) << 1));
21335 switch (ctx
->opcode
& 0xf) {
21338 switch (extract32(ctx
->opcode
, 4, 1)) {
21340 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
21341 extract32(ctx
->opcode
, 5, 5), 0, 0);
21344 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
21345 extract32(ctx
->opcode
, 5, 5), 31, 0);
21352 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
21353 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
21354 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
21355 extract32(ctx
->opcode
, 0, 4) << 1);
21362 int count
= extract32(ctx
->opcode
, 0, 4);
21363 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
21365 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
21366 switch (extract32(ctx
->opcode
, 8, 1)) {
21368 gen_save(ctx
, rt
, count
, 0, u
);
21370 case NM_RESTORE_JRC16
:
21371 gen_restore(ctx
, rt
, count
, 0, u
);
21372 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21381 static const int gpr2reg1
[] = {4, 5, 6, 7};
21382 static const int gpr2reg2
[] = {5, 6, 7, 8};
21384 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
21385 extract32(ctx
->opcode
, 8, 1);
21386 int r1
= gpr2reg1
[rd2
];
21387 int r2
= gpr2reg2
[rd2
];
21388 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
21389 extract32(ctx
->opcode
, 0, 3);
21390 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
21391 extract32(ctx
->opcode
, 5, 3);
21392 TCGv t0
= tcg_temp_new();
21393 TCGv t1
= tcg_temp_new();
21394 if (op
== NM_MOVEP
) {
21397 rs
= decode_gpr_gpr4_zero(r3
);
21398 rt
= decode_gpr_gpr4_zero(r4
);
21400 rd
= decode_gpr_gpr4(r3
);
21401 re
= decode_gpr_gpr4(r4
);
21405 gen_load_gpr(t0
, rs
);
21406 gen_load_gpr(t1
, rt
);
21407 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21408 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
21414 return decode_nanomips_32_48_opc(env
, ctx
);
21421 /* SmartMIPS extension to MIPS32 */
21423 #if defined(TARGET_MIPS64)
21425 /* MDMX extension to MIPS64 */
21429 /* MIPSDSP functions. */
21430 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
21431 int rd
, int base
, int offset
)
21436 t0
= tcg_temp_new();
21439 gen_load_gpr(t0
, offset
);
21440 } else if (offset
== 0) {
21441 gen_load_gpr(t0
, base
);
21443 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
21448 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
21449 gen_store_gpr(t0
, rd
);
21452 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
21453 gen_store_gpr(t0
, rd
);
21456 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
21457 gen_store_gpr(t0
, rd
);
21459 #if defined(TARGET_MIPS64)
21461 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
21462 gen_store_gpr(t0
, rd
);
21469 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
21470 int ret
, int v1
, int v2
)
21476 /* Treat as NOP. */
21480 v1_t
= tcg_temp_new();
21481 v2_t
= tcg_temp_new();
21483 gen_load_gpr(v1_t
, v1
);
21484 gen_load_gpr(v2_t
, v2
);
21487 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
21488 case OPC_MULT_G_2E
:
21492 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21494 case OPC_ADDUH_R_QB
:
21495 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21498 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21500 case OPC_ADDQH_R_PH
:
21501 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21504 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21506 case OPC_ADDQH_R_W
:
21507 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21510 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21512 case OPC_SUBUH_R_QB
:
21513 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21516 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21518 case OPC_SUBQH_R_PH
:
21519 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21522 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21524 case OPC_SUBQH_R_W
:
21525 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21529 case OPC_ABSQ_S_PH_DSP
:
21531 case OPC_ABSQ_S_QB
:
21533 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
21535 case OPC_ABSQ_S_PH
:
21537 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
21541 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
21543 case OPC_PRECEQ_W_PHL
:
21545 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
21546 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21548 case OPC_PRECEQ_W_PHR
:
21550 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
21551 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
21552 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
21554 case OPC_PRECEQU_PH_QBL
:
21556 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
21558 case OPC_PRECEQU_PH_QBR
:
21560 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
21562 case OPC_PRECEQU_PH_QBLA
:
21564 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
21566 case OPC_PRECEQU_PH_QBRA
:
21568 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
21570 case OPC_PRECEU_PH_QBL
:
21572 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
21574 case OPC_PRECEU_PH_QBR
:
21576 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
21578 case OPC_PRECEU_PH_QBLA
:
21580 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
21582 case OPC_PRECEU_PH_QBRA
:
21584 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
21588 case OPC_ADDU_QB_DSP
:
21592 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21594 case OPC_ADDQ_S_PH
:
21596 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21600 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21604 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21606 case OPC_ADDU_S_QB
:
21608 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21612 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21614 case OPC_ADDU_S_PH
:
21616 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21620 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21622 case OPC_SUBQ_S_PH
:
21624 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21628 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21632 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21634 case OPC_SUBU_S_QB
:
21636 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21640 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21642 case OPC_SUBU_S_PH
:
21644 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21648 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21652 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21656 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
21658 case OPC_RADDU_W_QB
:
21660 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
21664 case OPC_CMPU_EQ_QB_DSP
:
21666 case OPC_PRECR_QB_PH
:
21668 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21670 case OPC_PRECRQ_QB_PH
:
21672 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21674 case OPC_PRECR_SRA_PH_W
:
21677 TCGv_i32 sa_t
= tcg_const_i32(v2
);
21678 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
21680 tcg_temp_free_i32(sa_t
);
21683 case OPC_PRECR_SRA_R_PH_W
:
21686 TCGv_i32 sa_t
= tcg_const_i32(v2
);
21687 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
21689 tcg_temp_free_i32(sa_t
);
21692 case OPC_PRECRQ_PH_W
:
21694 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
21696 case OPC_PRECRQ_RS_PH_W
:
21698 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21700 case OPC_PRECRQU_S_QB_PH
:
21702 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21706 #ifdef TARGET_MIPS64
21707 case OPC_ABSQ_S_QH_DSP
:
21709 case OPC_PRECEQ_L_PWL
:
21711 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
21713 case OPC_PRECEQ_L_PWR
:
21715 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
21717 case OPC_PRECEQ_PW_QHL
:
21719 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
21721 case OPC_PRECEQ_PW_QHR
:
21723 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
21725 case OPC_PRECEQ_PW_QHLA
:
21727 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
21729 case OPC_PRECEQ_PW_QHRA
:
21731 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
21733 case OPC_PRECEQU_QH_OBL
:
21735 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
21737 case OPC_PRECEQU_QH_OBR
:
21739 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
21741 case OPC_PRECEQU_QH_OBLA
:
21743 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
21745 case OPC_PRECEQU_QH_OBRA
:
21747 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
21749 case OPC_PRECEU_QH_OBL
:
21751 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
21753 case OPC_PRECEU_QH_OBR
:
21755 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
21757 case OPC_PRECEU_QH_OBLA
:
21759 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
21761 case OPC_PRECEU_QH_OBRA
:
21763 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
21765 case OPC_ABSQ_S_OB
:
21767 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
21769 case OPC_ABSQ_S_PW
:
21771 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
21773 case OPC_ABSQ_S_QH
:
21775 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
21779 case OPC_ADDU_OB_DSP
:
21781 case OPC_RADDU_L_OB
:
21783 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
21787 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21789 case OPC_SUBQ_S_PW
:
21791 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21795 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21797 case OPC_SUBQ_S_QH
:
21799 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21803 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21805 case OPC_SUBU_S_OB
:
21807 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21811 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21813 case OPC_SUBU_S_QH
:
21815 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21819 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21821 case OPC_SUBUH_R_OB
:
21823 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21827 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21829 case OPC_ADDQ_S_PW
:
21831 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21835 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21837 case OPC_ADDQ_S_QH
:
21839 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21843 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21845 case OPC_ADDU_S_OB
:
21847 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21851 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21853 case OPC_ADDU_S_QH
:
21855 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21859 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21861 case OPC_ADDUH_R_OB
:
21863 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
21867 case OPC_CMPU_EQ_OB_DSP
:
21869 case OPC_PRECR_OB_QH
:
21871 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
21873 case OPC_PRECR_SRA_QH_PW
:
21876 TCGv_i32 ret_t
= tcg_const_i32(ret
);
21877 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
21878 tcg_temp_free_i32(ret_t
);
21881 case OPC_PRECR_SRA_R_QH_PW
:
21884 TCGv_i32 sa_v
= tcg_const_i32(ret
);
21885 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
21886 tcg_temp_free_i32(sa_v
);
21889 case OPC_PRECRQ_OB_QH
:
21891 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
21893 case OPC_PRECRQ_PW_L
:
21895 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
21897 case OPC_PRECRQ_QH_PW
:
21899 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
21901 case OPC_PRECRQ_RS_QH_PW
:
21903 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21905 case OPC_PRECRQU_S_OB_QH
:
21907 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21914 tcg_temp_free(v1_t
);
21915 tcg_temp_free(v2_t
);
21918 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
21919 int ret
, int v1
, int v2
)
21927 /* Treat as NOP. */
21931 t0
= tcg_temp_new();
21932 v1_t
= tcg_temp_new();
21933 v2_t
= tcg_temp_new();
21935 tcg_gen_movi_tl(t0
, v1
);
21936 gen_load_gpr(v1_t
, v1
);
21937 gen_load_gpr(v2_t
, v2
);
21940 case OPC_SHLL_QB_DSP
:
21942 op2
= MASK_SHLL_QB(ctx
->opcode
);
21946 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21950 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21954 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21958 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21960 case OPC_SHLL_S_PH
:
21962 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21964 case OPC_SHLLV_S_PH
:
21966 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21970 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
21972 case OPC_SHLLV_S_W
:
21974 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
21978 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
21982 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
21986 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
21990 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
21994 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
21996 case OPC_SHRA_R_QB
:
21998 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22002 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22004 case OPC_SHRAV_R_QB
:
22006 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22010 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22012 case OPC_SHRA_R_PH
:
22014 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22018 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22020 case OPC_SHRAV_R_PH
:
22022 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22026 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22028 case OPC_SHRAV_R_W
:
22030 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22032 default: /* Invalid */
22033 MIPS_INVAL("MASK SHLL.QB");
22034 generate_exception_end(ctx
, EXCP_RI
);
22039 #ifdef TARGET_MIPS64
22040 case OPC_SHLL_OB_DSP
:
22041 op2
= MASK_SHLL_OB(ctx
->opcode
);
22045 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22049 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22051 case OPC_SHLL_S_PW
:
22053 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22055 case OPC_SHLLV_S_PW
:
22057 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22061 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22065 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22069 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22073 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22075 case OPC_SHLL_S_QH
:
22077 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22079 case OPC_SHLLV_S_QH
:
22081 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22085 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22089 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22091 case OPC_SHRA_R_OB
:
22093 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22095 case OPC_SHRAV_R_OB
:
22097 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22101 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22105 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22107 case OPC_SHRA_R_PW
:
22109 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22111 case OPC_SHRAV_R_PW
:
22113 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22117 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22121 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22123 case OPC_SHRA_R_QH
:
22125 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22127 case OPC_SHRAV_R_QH
:
22129 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22133 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22137 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22141 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22145 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22147 default: /* Invalid */
22148 MIPS_INVAL("MASK SHLL.OB");
22149 generate_exception_end(ctx
, EXCP_RI
);
22157 tcg_temp_free(v1_t
);
22158 tcg_temp_free(v2_t
);
22161 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22162 int ret
, int v1
, int v2
, int check_ret
)
22168 if ((ret
== 0) && (check_ret
== 1)) {
22169 /* Treat as NOP. */
22173 t0
= tcg_temp_new_i32();
22174 v1_t
= tcg_temp_new();
22175 v2_t
= tcg_temp_new();
22177 tcg_gen_movi_i32(t0
, ret
);
22178 gen_load_gpr(v1_t
, v1
);
22179 gen_load_gpr(v2_t
, v2
);
22182 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22183 * the same mask and op1. */
22184 case OPC_MULT_G_2E
:
22188 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22191 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22194 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22196 case OPC_MULQ_RS_W
:
22197 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22201 case OPC_DPA_W_PH_DSP
:
22203 case OPC_DPAU_H_QBL
:
22205 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22207 case OPC_DPAU_H_QBR
:
22209 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22211 case OPC_DPSU_H_QBL
:
22213 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22215 case OPC_DPSU_H_QBR
:
22217 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22221 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22223 case OPC_DPAX_W_PH
:
22225 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22227 case OPC_DPAQ_S_W_PH
:
22229 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22231 case OPC_DPAQX_S_W_PH
:
22233 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22235 case OPC_DPAQX_SA_W_PH
:
22237 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22241 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22243 case OPC_DPSX_W_PH
:
22245 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22247 case OPC_DPSQ_S_W_PH
:
22249 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22251 case OPC_DPSQX_S_W_PH
:
22253 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22255 case OPC_DPSQX_SA_W_PH
:
22257 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22259 case OPC_MULSAQ_S_W_PH
:
22261 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22263 case OPC_DPAQ_SA_L_W
:
22265 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22267 case OPC_DPSQ_SA_L_W
:
22269 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
22271 case OPC_MAQ_S_W_PHL
:
22273 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22275 case OPC_MAQ_S_W_PHR
:
22277 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22279 case OPC_MAQ_SA_W_PHL
:
22281 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
22283 case OPC_MAQ_SA_W_PHR
:
22285 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
22287 case OPC_MULSA_W_PH
:
22289 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22293 #ifdef TARGET_MIPS64
22294 case OPC_DPAQ_W_QH_DSP
:
22296 int ac
= ret
& 0x03;
22297 tcg_gen_movi_i32(t0
, ac
);
22302 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
22306 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
22310 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
22314 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
22318 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22320 case OPC_DPAQ_S_W_QH
:
22322 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22324 case OPC_DPAQ_SA_L_PW
:
22326 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22328 case OPC_DPAU_H_OBL
:
22330 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22332 case OPC_DPAU_H_OBR
:
22334 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22338 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22340 case OPC_DPSQ_S_W_QH
:
22342 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22344 case OPC_DPSQ_SA_L_PW
:
22346 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22348 case OPC_DPSU_H_OBL
:
22350 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
22352 case OPC_DPSU_H_OBR
:
22354 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
22356 case OPC_MAQ_S_L_PWL
:
22358 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
22360 case OPC_MAQ_S_L_PWR
:
22362 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
22364 case OPC_MAQ_S_W_QHLL
:
22366 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22368 case OPC_MAQ_SA_W_QHLL
:
22370 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
22372 case OPC_MAQ_S_W_QHLR
:
22374 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22376 case OPC_MAQ_SA_W_QHLR
:
22378 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
22380 case OPC_MAQ_S_W_QHRL
:
22382 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22384 case OPC_MAQ_SA_W_QHRL
:
22386 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
22388 case OPC_MAQ_S_W_QHRR
:
22390 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22392 case OPC_MAQ_SA_W_QHRR
:
22394 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
22396 case OPC_MULSAQ_S_L_PW
:
22398 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
22400 case OPC_MULSAQ_S_W_QH
:
22402 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
22408 case OPC_ADDU_QB_DSP
:
22410 case OPC_MULEU_S_PH_QBL
:
22412 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22414 case OPC_MULEU_S_PH_QBR
:
22416 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22418 case OPC_MULQ_RS_PH
:
22420 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22422 case OPC_MULEQ_S_W_PHL
:
22424 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22426 case OPC_MULEQ_S_W_PHR
:
22428 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22430 case OPC_MULQ_S_PH
:
22432 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22436 #ifdef TARGET_MIPS64
22437 case OPC_ADDU_OB_DSP
:
22439 case OPC_MULEQ_S_PW_QHL
:
22441 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22443 case OPC_MULEQ_S_PW_QHR
:
22445 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22447 case OPC_MULEU_S_QH_OBL
:
22449 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22451 case OPC_MULEU_S_QH_OBR
:
22453 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22455 case OPC_MULQ_RS_QH
:
22457 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22464 tcg_temp_free_i32(t0
);
22465 tcg_temp_free(v1_t
);
22466 tcg_temp_free(v2_t
);
22469 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22477 /* Treat as NOP. */
22481 t0
= tcg_temp_new();
22482 val_t
= tcg_temp_new();
22483 gen_load_gpr(val_t
, val
);
22486 case OPC_ABSQ_S_PH_DSP
:
22490 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
22495 target_long result
;
22496 imm
= (ctx
->opcode
>> 16) & 0xFF;
22497 result
= (uint32_t)imm
<< 24 |
22498 (uint32_t)imm
<< 16 |
22499 (uint32_t)imm
<< 8 |
22501 result
= (int32_t)result
;
22502 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
22507 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22508 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22509 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22510 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22511 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22512 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22517 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22518 imm
= (int16_t)(imm
<< 6) >> 6;
22519 tcg_gen_movi_tl(cpu_gpr
[ret
], \
22520 (target_long
)((int32_t)imm
<< 16 | \
22526 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22527 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22528 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22529 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22533 #ifdef TARGET_MIPS64
22534 case OPC_ABSQ_S_QH_DSP
:
22541 imm
= (ctx
->opcode
>> 16) & 0xFF;
22542 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
22543 temp
= (temp
<< 16) | temp
;
22544 temp
= (temp
<< 32) | temp
;
22545 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22553 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22554 imm
= (int16_t)(imm
<< 6) >> 6;
22555 temp
= ((target_long
)imm
<< 32) \
22556 | ((target_long
)imm
& 0xFFFFFFFF);
22557 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22565 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22566 imm
= (int16_t)(imm
<< 6) >> 6;
22568 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
22569 ((uint64_t)(uint16_t)imm
<< 32) |
22570 ((uint64_t)(uint16_t)imm
<< 16) |
22571 (uint64_t)(uint16_t)imm
;
22572 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
22577 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
22578 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
22579 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22580 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22581 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22582 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22583 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22587 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
22588 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22589 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22593 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
22594 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
22595 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22596 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
22597 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
22604 tcg_temp_free(val_t
);
22607 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
22608 uint32_t op1
, uint32_t op2
,
22609 int ret
, int v1
, int v2
, int check_ret
)
22615 if ((ret
== 0) && (check_ret
== 1)) {
22616 /* Treat as NOP. */
22620 t1
= tcg_temp_new();
22621 v1_t
= tcg_temp_new();
22622 v2_t
= tcg_temp_new();
22624 gen_load_gpr(v1_t
, v1
);
22625 gen_load_gpr(v2_t
, v2
);
22628 case OPC_CMPU_EQ_QB_DSP
:
22630 case OPC_CMPU_EQ_QB
:
22632 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
22634 case OPC_CMPU_LT_QB
:
22636 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
22638 case OPC_CMPU_LE_QB
:
22640 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
22642 case OPC_CMPGU_EQ_QB
:
22644 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22646 case OPC_CMPGU_LT_QB
:
22648 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22650 case OPC_CMPGU_LE_QB
:
22652 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22654 case OPC_CMPGDU_EQ_QB
:
22656 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
22657 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22658 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22659 tcg_gen_shli_tl(t1
, t1
, 24);
22660 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22662 case OPC_CMPGDU_LT_QB
:
22664 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
22665 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22666 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22667 tcg_gen_shli_tl(t1
, t1
, 24);
22668 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22670 case OPC_CMPGDU_LE_QB
:
22672 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
22673 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
22674 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
22675 tcg_gen_shli_tl(t1
, t1
, 24);
22676 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
22678 case OPC_CMP_EQ_PH
:
22680 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
22682 case OPC_CMP_LT_PH
:
22684 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
22686 case OPC_CMP_LE_PH
:
22688 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
22692 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22696 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22698 case OPC_PACKRL_PH
:
22700 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22704 #ifdef TARGET_MIPS64
22705 case OPC_CMPU_EQ_OB_DSP
:
22707 case OPC_CMP_EQ_PW
:
22709 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
22711 case OPC_CMP_LT_PW
:
22713 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
22715 case OPC_CMP_LE_PW
:
22717 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
22719 case OPC_CMP_EQ_QH
:
22721 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
22723 case OPC_CMP_LT_QH
:
22725 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
22727 case OPC_CMP_LE_QH
:
22729 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
22731 case OPC_CMPGDU_EQ_OB
:
22733 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22735 case OPC_CMPGDU_LT_OB
:
22737 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22739 case OPC_CMPGDU_LE_OB
:
22741 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22743 case OPC_CMPGU_EQ_OB
:
22745 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22747 case OPC_CMPGU_LT_OB
:
22749 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22751 case OPC_CMPGU_LE_OB
:
22753 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22755 case OPC_CMPU_EQ_OB
:
22757 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
22759 case OPC_CMPU_LT_OB
:
22761 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
22763 case OPC_CMPU_LE_OB
:
22765 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
22767 case OPC_PACKRL_PW
:
22769 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22773 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22777 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22781 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22789 tcg_temp_free(v1_t
);
22790 tcg_temp_free(v2_t
);
22793 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
22794 uint32_t op1
, int rt
, int rs
, int sa
)
22801 /* Treat as NOP. */
22805 t0
= tcg_temp_new();
22806 gen_load_gpr(t0
, rs
);
22809 case OPC_APPEND_DSP
:
22810 switch (MASK_APPEND(ctx
->opcode
)) {
22813 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
22815 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
22819 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
22820 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
22821 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
22822 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
22824 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
22828 if (sa
!= 0 && sa
!= 2) {
22829 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
22830 tcg_gen_ext32u_tl(t0
, t0
);
22831 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
22832 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
22834 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
22836 default: /* Invalid */
22837 MIPS_INVAL("MASK APPEND");
22838 generate_exception_end(ctx
, EXCP_RI
);
22842 #ifdef TARGET_MIPS64
22843 case OPC_DAPPEND_DSP
:
22844 switch (MASK_DAPPEND(ctx
->opcode
)) {
22847 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
22851 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
22852 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
22853 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
22857 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
22858 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
22859 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
22864 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
22865 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
22866 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
22867 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
22870 default: /* Invalid */
22871 MIPS_INVAL("MASK DAPPEND");
22872 generate_exception_end(ctx
, EXCP_RI
);
22881 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22882 int ret
, int v1
, int v2
, int check_ret
)
22891 if ((ret
== 0) && (check_ret
== 1)) {
22892 /* Treat as NOP. */
22896 t0
= tcg_temp_new();
22897 t1
= tcg_temp_new();
22898 v1_t
= tcg_temp_new();
22899 v2_t
= tcg_temp_new();
22901 gen_load_gpr(v1_t
, v1
);
22902 gen_load_gpr(v2_t
, v2
);
22905 case OPC_EXTR_W_DSP
:
22909 tcg_gen_movi_tl(t0
, v2
);
22910 tcg_gen_movi_tl(t1
, v1
);
22911 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22914 tcg_gen_movi_tl(t0
, v2
);
22915 tcg_gen_movi_tl(t1
, v1
);
22916 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22918 case OPC_EXTR_RS_W
:
22919 tcg_gen_movi_tl(t0
, v2
);
22920 tcg_gen_movi_tl(t1
, v1
);
22921 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22924 tcg_gen_movi_tl(t0
, v2
);
22925 tcg_gen_movi_tl(t1
, v1
);
22926 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22928 case OPC_EXTRV_S_H
:
22929 tcg_gen_movi_tl(t0
, v2
);
22930 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22933 tcg_gen_movi_tl(t0
, v2
);
22934 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22936 case OPC_EXTRV_R_W
:
22937 tcg_gen_movi_tl(t0
, v2
);
22938 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22940 case OPC_EXTRV_RS_W
:
22941 tcg_gen_movi_tl(t0
, v2
);
22942 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22945 tcg_gen_movi_tl(t0
, v2
);
22946 tcg_gen_movi_tl(t1
, v1
);
22947 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22950 tcg_gen_movi_tl(t0
, v2
);
22951 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22954 tcg_gen_movi_tl(t0
, v2
);
22955 tcg_gen_movi_tl(t1
, v1
);
22956 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
22959 tcg_gen_movi_tl(t0
, v2
);
22960 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
22963 imm
= (ctx
->opcode
>> 20) & 0x3F;
22964 tcg_gen_movi_tl(t0
, ret
);
22965 tcg_gen_movi_tl(t1
, imm
);
22966 gen_helper_shilo(t0
, t1
, cpu_env
);
22969 tcg_gen_movi_tl(t0
, ret
);
22970 gen_helper_shilo(t0
, v1_t
, cpu_env
);
22973 tcg_gen_movi_tl(t0
, ret
);
22974 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
22977 imm
= (ctx
->opcode
>> 11) & 0x3FF;
22978 tcg_gen_movi_tl(t0
, imm
);
22979 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
22982 imm
= (ctx
->opcode
>> 16) & 0x03FF;
22983 tcg_gen_movi_tl(t0
, imm
);
22984 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
22988 #ifdef TARGET_MIPS64
22989 case OPC_DEXTR_W_DSP
:
22993 tcg_gen_movi_tl(t0
, ret
);
22994 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
22998 int shift
= (ctx
->opcode
>> 19) & 0x7F;
22999 int ac
= (ctx
->opcode
>> 11) & 0x03;
23000 tcg_gen_movi_tl(t0
, shift
);
23001 tcg_gen_movi_tl(t1
, ac
);
23002 gen_helper_dshilo(t0
, t1
, cpu_env
);
23007 int ac
= (ctx
->opcode
>> 11) & 0x03;
23008 tcg_gen_movi_tl(t0
, ac
);
23009 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23013 tcg_gen_movi_tl(t0
, v2
);
23014 tcg_gen_movi_tl(t1
, v1
);
23016 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23019 tcg_gen_movi_tl(t0
, v2
);
23020 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23023 tcg_gen_movi_tl(t0
, v2
);
23024 tcg_gen_movi_tl(t1
, v1
);
23025 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23028 tcg_gen_movi_tl(t0
, v2
);
23029 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23032 tcg_gen_movi_tl(t0
, v2
);
23033 tcg_gen_movi_tl(t1
, v1
);
23034 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23036 case OPC_DEXTR_R_L
:
23037 tcg_gen_movi_tl(t0
, v2
);
23038 tcg_gen_movi_tl(t1
, v1
);
23039 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23041 case OPC_DEXTR_RS_L
:
23042 tcg_gen_movi_tl(t0
, v2
);
23043 tcg_gen_movi_tl(t1
, v1
);
23044 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23047 tcg_gen_movi_tl(t0
, v2
);
23048 tcg_gen_movi_tl(t1
, v1
);
23049 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23051 case OPC_DEXTR_R_W
:
23052 tcg_gen_movi_tl(t0
, v2
);
23053 tcg_gen_movi_tl(t1
, v1
);
23054 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23056 case OPC_DEXTR_RS_W
:
23057 tcg_gen_movi_tl(t0
, v2
);
23058 tcg_gen_movi_tl(t1
, v1
);
23059 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23061 case OPC_DEXTR_S_H
:
23062 tcg_gen_movi_tl(t0
, v2
);
23063 tcg_gen_movi_tl(t1
, v1
);
23064 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23066 case OPC_DEXTRV_S_H
:
23067 tcg_gen_movi_tl(t0
, v2
);
23068 tcg_gen_movi_tl(t1
, v1
);
23069 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23072 tcg_gen_movi_tl(t0
, v2
);
23073 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23075 case OPC_DEXTRV_R_L
:
23076 tcg_gen_movi_tl(t0
, v2
);
23077 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23079 case OPC_DEXTRV_RS_L
:
23080 tcg_gen_movi_tl(t0
, v2
);
23081 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23084 tcg_gen_movi_tl(t0
, v2
);
23085 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23087 case OPC_DEXTRV_R_W
:
23088 tcg_gen_movi_tl(t0
, v2
);
23089 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23091 case OPC_DEXTRV_RS_W
:
23092 tcg_gen_movi_tl(t0
, v2
);
23093 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23102 tcg_temp_free(v1_t
);
23103 tcg_temp_free(v2_t
);
23106 /* End MIPSDSP functions. */
23108 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23110 int rs
, rt
, rd
, sa
;
23113 rs
= (ctx
->opcode
>> 21) & 0x1f;
23114 rt
= (ctx
->opcode
>> 16) & 0x1f;
23115 rd
= (ctx
->opcode
>> 11) & 0x1f;
23116 sa
= (ctx
->opcode
>> 6) & 0x1f;
23118 op1
= MASK_SPECIAL(ctx
->opcode
);
23121 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23127 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23137 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23140 MIPS_INVAL("special_r6 muldiv");
23141 generate_exception_end(ctx
, EXCP_RI
);
23147 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23151 if (rt
== 0 && sa
== 1) {
23152 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23153 We need additionally to check other fields */
23154 gen_cl(ctx
, op1
, rd
, rs
);
23156 generate_exception_end(ctx
, EXCP_RI
);
23160 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23161 gen_helper_do_semihosting(cpu_env
);
23163 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23164 generate_exception_end(ctx
, EXCP_RI
);
23166 generate_exception_end(ctx
, EXCP_DBp
);
23170 #if defined(TARGET_MIPS64)
23172 check_mips_64(ctx
);
23173 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23177 if (rt
== 0 && sa
== 1) {
23178 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23179 We need additionally to check other fields */
23180 check_mips_64(ctx
);
23181 gen_cl(ctx
, op1
, rd
, rs
);
23183 generate_exception_end(ctx
, EXCP_RI
);
23191 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23201 check_mips_64(ctx
);
23202 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23205 MIPS_INVAL("special_r6 muldiv");
23206 generate_exception_end(ctx
, EXCP_RI
);
23211 default: /* Invalid */
23212 MIPS_INVAL("special_r6");
23213 generate_exception_end(ctx
, EXCP_RI
);
23218 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23220 int rs
, rt
, rd
, sa
;
23223 rs
= (ctx
->opcode
>> 21) & 0x1f;
23224 rt
= (ctx
->opcode
>> 16) & 0x1f;
23225 rd
= (ctx
->opcode
>> 11) & 0x1f;
23226 sa
= (ctx
->opcode
>> 6) & 0x1f;
23228 op1
= MASK_SPECIAL(ctx
->opcode
);
23230 case OPC_MOVN
: /* Conditional move */
23232 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
23233 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
23234 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23236 case OPC_MFHI
: /* Move from HI/LO */
23238 gen_HILO(ctx
, op1
, rs
& 3, rd
);
23241 case OPC_MTLO
: /* Move to HI/LO */
23242 gen_HILO(ctx
, op1
, rd
& 3, rs
);
23245 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
23246 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
23247 check_cp1_enabled(ctx
);
23248 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
23249 (ctx
->opcode
>> 16) & 1);
23251 generate_exception_err(ctx
, EXCP_CpU
, 1);
23257 check_insn(ctx
, INSN_VR54XX
);
23258 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
23259 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
23261 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23266 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23268 #if defined(TARGET_MIPS64)
23273 check_insn(ctx
, ISA_MIPS3
);
23274 check_mips_64(ctx
);
23275 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23279 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23282 #ifdef MIPS_STRICT_STANDARD
23283 MIPS_INVAL("SPIM");
23284 generate_exception_end(ctx
, EXCP_RI
);
23286 /* Implemented as RI exception for now. */
23287 MIPS_INVAL("spim (unofficial)");
23288 generate_exception_end(ctx
, EXCP_RI
);
23291 default: /* Invalid */
23292 MIPS_INVAL("special_legacy");
23293 generate_exception_end(ctx
, EXCP_RI
);
23298 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
23300 int rs
, rt
, rd
, sa
;
23303 rs
= (ctx
->opcode
>> 21) & 0x1f;
23304 rt
= (ctx
->opcode
>> 16) & 0x1f;
23305 rd
= (ctx
->opcode
>> 11) & 0x1f;
23306 sa
= (ctx
->opcode
>> 6) & 0x1f;
23308 op1
= MASK_SPECIAL(ctx
->opcode
);
23310 case OPC_SLL
: /* Shift with immediate */
23311 if (sa
== 5 && rd
== 0 &&
23312 rs
== 0 && rt
== 0) { /* PAUSE */
23313 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
23314 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
23315 generate_exception_end(ctx
, EXCP_RI
);
23321 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23324 switch ((ctx
->opcode
>> 21) & 0x1f) {
23326 /* rotr is decoded as srl on non-R2 CPUs */
23327 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23332 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23335 generate_exception_end(ctx
, EXCP_RI
);
23343 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23345 case OPC_SLLV
: /* Shifts */
23347 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23350 switch ((ctx
->opcode
>> 6) & 0x1f) {
23352 /* rotrv is decoded as srlv on non-R2 CPUs */
23353 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23358 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23361 generate_exception_end(ctx
, EXCP_RI
);
23365 case OPC_SLT
: /* Set on less than */
23367 gen_slt(ctx
, op1
, rd
, rs
, rt
);
23369 case OPC_AND
: /* Logic*/
23373 gen_logic(ctx
, op1
, rd
, rs
, rt
);
23376 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
23378 case OPC_TGE
: /* Traps */
23384 check_insn(ctx
, ISA_MIPS2
);
23385 gen_trap(ctx
, op1
, rs
, rt
, -1);
23387 case OPC_LSA
: /* OPC_PMON */
23388 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23389 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23390 decode_opc_special_r6(env
, ctx
);
23392 /* Pmon entry point, also R4010 selsl */
23393 #ifdef MIPS_STRICT_STANDARD
23394 MIPS_INVAL("PMON / selsl");
23395 generate_exception_end(ctx
, EXCP_RI
);
23397 gen_helper_0e0i(pmon
, sa
);
23402 generate_exception_end(ctx
, EXCP_SYSCALL
);
23405 generate_exception_end(ctx
, EXCP_BREAK
);
23408 check_insn(ctx
, ISA_MIPS2
);
23409 gen_sync(extract32(ctx
->opcode
, 6, 5));
23412 #if defined(TARGET_MIPS64)
23413 /* MIPS64 specific opcodes */
23418 check_insn(ctx
, ISA_MIPS3
);
23419 check_mips_64(ctx
);
23420 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23423 switch ((ctx
->opcode
>> 21) & 0x1f) {
23425 /* drotr is decoded as dsrl on non-R2 CPUs */
23426 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23431 check_insn(ctx
, ISA_MIPS3
);
23432 check_mips_64(ctx
);
23433 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23436 generate_exception_end(ctx
, EXCP_RI
);
23441 switch ((ctx
->opcode
>> 21) & 0x1f) {
23443 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
23444 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23449 check_insn(ctx
, ISA_MIPS3
);
23450 check_mips_64(ctx
);
23451 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
23454 generate_exception_end(ctx
, EXCP_RI
);
23462 check_insn(ctx
, ISA_MIPS3
);
23463 check_mips_64(ctx
);
23464 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23468 check_insn(ctx
, ISA_MIPS3
);
23469 check_mips_64(ctx
);
23470 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23473 switch ((ctx
->opcode
>> 6) & 0x1f) {
23475 /* drotrv is decoded as dsrlv on non-R2 CPUs */
23476 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
23481 check_insn(ctx
, ISA_MIPS3
);
23482 check_mips_64(ctx
);
23483 gen_shift(ctx
, op1
, rd
, rs
, rt
);
23486 generate_exception_end(ctx
, EXCP_RI
);
23491 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
23492 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
23493 decode_opc_special_r6(env
, ctx
);
23498 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23499 decode_opc_special_r6(env
, ctx
);
23501 decode_opc_special_legacy(env
, ctx
);
23506 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23511 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23513 rs
= (ctx
->opcode
>> 21) & 0x1f;
23514 rt
= (ctx
->opcode
>> 16) & 0x1f;
23515 rd
= (ctx
->opcode
>> 11) & 0x1f;
23517 op1
= MASK_SPECIAL2(ctx
->opcode
);
23519 case OPC_MADD
: /* Multiply and add/sub */
23523 check_insn(ctx
, ISA_MIPS32
);
23524 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
23527 gen_arith(ctx
, op1
, rd
, rs
, rt
);
23530 case OPC_DIVU_G_2F
:
23531 case OPC_MULT_G_2F
:
23532 case OPC_MULTU_G_2F
:
23534 case OPC_MODU_G_2F
:
23535 check_insn(ctx
, INSN_LOONGSON2F
);
23536 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23540 check_insn(ctx
, ISA_MIPS32
);
23541 gen_cl(ctx
, op1
, rd
, rs
);
23544 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23545 gen_helper_do_semihosting(cpu_env
);
23547 /* XXX: not clear which exception should be raised
23548 * when in debug mode...
23550 check_insn(ctx
, ISA_MIPS32
);
23551 generate_exception_end(ctx
, EXCP_DBp
);
23554 #if defined(TARGET_MIPS64)
23557 check_insn(ctx
, ISA_MIPS64
);
23558 check_mips_64(ctx
);
23559 gen_cl(ctx
, op1
, rd
, rs
);
23561 case OPC_DMULT_G_2F
:
23562 case OPC_DMULTU_G_2F
:
23563 case OPC_DDIV_G_2F
:
23564 case OPC_DDIVU_G_2F
:
23565 case OPC_DMOD_G_2F
:
23566 case OPC_DMODU_G_2F
:
23567 check_insn(ctx
, INSN_LOONGSON2F
);
23568 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23571 default: /* Invalid */
23572 MIPS_INVAL("special2_legacy");
23573 generate_exception_end(ctx
, EXCP_RI
);
23578 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23580 int rs
, rt
, rd
, sa
;
23584 rs
= (ctx
->opcode
>> 21) & 0x1f;
23585 rt
= (ctx
->opcode
>> 16) & 0x1f;
23586 rd
= (ctx
->opcode
>> 11) & 0x1f;
23587 sa
= (ctx
->opcode
>> 6) & 0x1f;
23588 imm
= (int16_t)ctx
->opcode
>> 7;
23590 op1
= MASK_SPECIAL3(ctx
->opcode
);
23594 /* hint codes 24-31 are reserved and signal RI */
23595 generate_exception_end(ctx
, EXCP_RI
);
23597 /* Treat as NOP. */
23600 check_cp0_enabled(ctx
);
23601 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23602 gen_cache_operation(ctx
, rt
, rs
, imm
);
23606 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23609 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23614 /* Treat as NOP. */
23617 op2
= MASK_BSHFL(ctx
->opcode
);
23620 case OPC_ALIGN_END
:
23621 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
23624 gen_bitswap(ctx
, op2
, rd
, rt
);
23629 #if defined(TARGET_MIPS64)
23631 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23634 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23637 check_mips_64(ctx
);
23640 /* Treat as NOP. */
23643 op2
= MASK_DBSHFL(ctx
->opcode
);
23646 case OPC_DALIGN_END
:
23647 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
23650 gen_bitswap(ctx
, op2
, rd
, rt
);
23657 default: /* Invalid */
23658 MIPS_INVAL("special3_r6");
23659 generate_exception_end(ctx
, EXCP_RI
);
23664 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
23669 rs
= (ctx
->opcode
>> 21) & 0x1f;
23670 rt
= (ctx
->opcode
>> 16) & 0x1f;
23671 rd
= (ctx
->opcode
>> 11) & 0x1f;
23673 op1
= MASK_SPECIAL3(ctx
->opcode
);
23676 case OPC_DIVU_G_2E
:
23678 case OPC_MODU_G_2E
:
23679 case OPC_MULT_G_2E
:
23680 case OPC_MULTU_G_2E
:
23681 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
23682 * the same mask and op1. */
23683 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
23684 op2
= MASK_ADDUH_QB(ctx
->opcode
);
23687 case OPC_ADDUH_R_QB
:
23689 case OPC_ADDQH_R_PH
:
23691 case OPC_ADDQH_R_W
:
23693 case OPC_SUBUH_R_QB
:
23695 case OPC_SUBQH_R_PH
:
23697 case OPC_SUBQH_R_W
:
23698 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23703 case OPC_MULQ_RS_W
:
23704 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23707 MIPS_INVAL("MASK ADDUH.QB");
23708 generate_exception_end(ctx
, EXCP_RI
);
23711 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
23712 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23714 generate_exception_end(ctx
, EXCP_RI
);
23718 op2
= MASK_LX(ctx
->opcode
);
23720 #if defined(TARGET_MIPS64)
23726 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
23728 default: /* Invalid */
23729 MIPS_INVAL("MASK LX");
23730 generate_exception_end(ctx
, EXCP_RI
);
23734 case OPC_ABSQ_S_PH_DSP
:
23735 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
23737 case OPC_ABSQ_S_QB
:
23738 case OPC_ABSQ_S_PH
:
23740 case OPC_PRECEQ_W_PHL
:
23741 case OPC_PRECEQ_W_PHR
:
23742 case OPC_PRECEQU_PH_QBL
:
23743 case OPC_PRECEQU_PH_QBR
:
23744 case OPC_PRECEQU_PH_QBLA
:
23745 case OPC_PRECEQU_PH_QBRA
:
23746 case OPC_PRECEU_PH_QBL
:
23747 case OPC_PRECEU_PH_QBR
:
23748 case OPC_PRECEU_PH_QBLA
:
23749 case OPC_PRECEU_PH_QBRA
:
23750 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23757 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
23760 MIPS_INVAL("MASK ABSQ_S.PH");
23761 generate_exception_end(ctx
, EXCP_RI
);
23765 case OPC_ADDU_QB_DSP
:
23766 op2
= MASK_ADDU_QB(ctx
->opcode
);
23769 case OPC_ADDQ_S_PH
:
23772 case OPC_ADDU_S_QB
:
23774 case OPC_ADDU_S_PH
:
23776 case OPC_SUBQ_S_PH
:
23779 case OPC_SUBU_S_QB
:
23781 case OPC_SUBU_S_PH
:
23785 case OPC_RADDU_W_QB
:
23786 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23788 case OPC_MULEU_S_PH_QBL
:
23789 case OPC_MULEU_S_PH_QBR
:
23790 case OPC_MULQ_RS_PH
:
23791 case OPC_MULEQ_S_W_PHL
:
23792 case OPC_MULEQ_S_W_PHR
:
23793 case OPC_MULQ_S_PH
:
23794 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23796 default: /* Invalid */
23797 MIPS_INVAL("MASK ADDU.QB");
23798 generate_exception_end(ctx
, EXCP_RI
);
23803 case OPC_CMPU_EQ_QB_DSP
:
23804 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
23806 case OPC_PRECR_SRA_PH_W
:
23807 case OPC_PRECR_SRA_R_PH_W
:
23808 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
23810 case OPC_PRECR_QB_PH
:
23811 case OPC_PRECRQ_QB_PH
:
23812 case OPC_PRECRQ_PH_W
:
23813 case OPC_PRECRQ_RS_PH_W
:
23814 case OPC_PRECRQU_S_QB_PH
:
23815 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23817 case OPC_CMPU_EQ_QB
:
23818 case OPC_CMPU_LT_QB
:
23819 case OPC_CMPU_LE_QB
:
23820 case OPC_CMP_EQ_PH
:
23821 case OPC_CMP_LT_PH
:
23822 case OPC_CMP_LE_PH
:
23823 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23825 case OPC_CMPGU_EQ_QB
:
23826 case OPC_CMPGU_LT_QB
:
23827 case OPC_CMPGU_LE_QB
:
23828 case OPC_CMPGDU_EQ_QB
:
23829 case OPC_CMPGDU_LT_QB
:
23830 case OPC_CMPGDU_LE_QB
:
23833 case OPC_PACKRL_PH
:
23834 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23836 default: /* Invalid */
23837 MIPS_INVAL("MASK CMPU.EQ.QB");
23838 generate_exception_end(ctx
, EXCP_RI
);
23842 case OPC_SHLL_QB_DSP
:
23843 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
23845 case OPC_DPA_W_PH_DSP
:
23846 op2
= MASK_DPA_W_PH(ctx
->opcode
);
23848 case OPC_DPAU_H_QBL
:
23849 case OPC_DPAU_H_QBR
:
23850 case OPC_DPSU_H_QBL
:
23851 case OPC_DPSU_H_QBR
:
23853 case OPC_DPAX_W_PH
:
23854 case OPC_DPAQ_S_W_PH
:
23855 case OPC_DPAQX_S_W_PH
:
23856 case OPC_DPAQX_SA_W_PH
:
23858 case OPC_DPSX_W_PH
:
23859 case OPC_DPSQ_S_W_PH
:
23860 case OPC_DPSQX_S_W_PH
:
23861 case OPC_DPSQX_SA_W_PH
:
23862 case OPC_MULSAQ_S_W_PH
:
23863 case OPC_DPAQ_SA_L_W
:
23864 case OPC_DPSQ_SA_L_W
:
23865 case OPC_MAQ_S_W_PHL
:
23866 case OPC_MAQ_S_W_PHR
:
23867 case OPC_MAQ_SA_W_PHL
:
23868 case OPC_MAQ_SA_W_PHR
:
23869 case OPC_MULSA_W_PH
:
23870 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23872 default: /* Invalid */
23873 MIPS_INVAL("MASK DPAW.PH");
23874 generate_exception_end(ctx
, EXCP_RI
);
23879 op2
= MASK_INSV(ctx
->opcode
);
23890 t0
= tcg_temp_new();
23891 t1
= tcg_temp_new();
23893 gen_load_gpr(t0
, rt
);
23894 gen_load_gpr(t1
, rs
);
23896 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
23902 default: /* Invalid */
23903 MIPS_INVAL("MASK INSV");
23904 generate_exception_end(ctx
, EXCP_RI
);
23908 case OPC_APPEND_DSP
:
23909 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
23911 case OPC_EXTR_W_DSP
:
23912 op2
= MASK_EXTR_W(ctx
->opcode
);
23916 case OPC_EXTR_RS_W
:
23918 case OPC_EXTRV_S_H
:
23920 case OPC_EXTRV_R_W
:
23921 case OPC_EXTRV_RS_W
:
23926 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
23929 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
23935 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
23937 default: /* Invalid */
23938 MIPS_INVAL("MASK EXTR.W");
23939 generate_exception_end(ctx
, EXCP_RI
);
23943 #if defined(TARGET_MIPS64)
23944 case OPC_DDIV_G_2E
:
23945 case OPC_DDIVU_G_2E
:
23946 case OPC_DMULT_G_2E
:
23947 case OPC_DMULTU_G_2E
:
23948 case OPC_DMOD_G_2E
:
23949 case OPC_DMODU_G_2E
:
23950 check_insn(ctx
, INSN_LOONGSON2E
);
23951 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
23953 case OPC_ABSQ_S_QH_DSP
:
23954 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
23956 case OPC_PRECEQ_L_PWL
:
23957 case OPC_PRECEQ_L_PWR
:
23958 case OPC_PRECEQ_PW_QHL
:
23959 case OPC_PRECEQ_PW_QHR
:
23960 case OPC_PRECEQ_PW_QHLA
:
23961 case OPC_PRECEQ_PW_QHRA
:
23962 case OPC_PRECEQU_QH_OBL
:
23963 case OPC_PRECEQU_QH_OBR
:
23964 case OPC_PRECEQU_QH_OBLA
:
23965 case OPC_PRECEQU_QH_OBRA
:
23966 case OPC_PRECEU_QH_OBL
:
23967 case OPC_PRECEU_QH_OBR
:
23968 case OPC_PRECEU_QH_OBLA
:
23969 case OPC_PRECEU_QH_OBRA
:
23970 case OPC_ABSQ_S_OB
:
23971 case OPC_ABSQ_S_PW
:
23972 case OPC_ABSQ_S_QH
:
23973 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
23981 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
23983 default: /* Invalid */
23984 MIPS_INVAL("MASK ABSQ_S.QH");
23985 generate_exception_end(ctx
, EXCP_RI
);
23989 case OPC_ADDU_OB_DSP
:
23990 op2
= MASK_ADDU_OB(ctx
->opcode
);
23992 case OPC_RADDU_L_OB
:
23994 case OPC_SUBQ_S_PW
:
23996 case OPC_SUBQ_S_QH
:
23998 case OPC_SUBU_S_OB
:
24000 case OPC_SUBU_S_QH
:
24002 case OPC_SUBUH_R_OB
:
24004 case OPC_ADDQ_S_PW
:
24006 case OPC_ADDQ_S_QH
:
24008 case OPC_ADDU_S_OB
:
24010 case OPC_ADDU_S_QH
:
24012 case OPC_ADDUH_R_OB
:
24013 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24015 case OPC_MULEQ_S_PW_QHL
:
24016 case OPC_MULEQ_S_PW_QHR
:
24017 case OPC_MULEU_S_QH_OBL
:
24018 case OPC_MULEU_S_QH_OBR
:
24019 case OPC_MULQ_RS_QH
:
24020 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24022 default: /* Invalid */
24023 MIPS_INVAL("MASK ADDU.OB");
24024 generate_exception_end(ctx
, EXCP_RI
);
24028 case OPC_CMPU_EQ_OB_DSP
:
24029 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
24031 case OPC_PRECR_SRA_QH_PW
:
24032 case OPC_PRECR_SRA_R_QH_PW
:
24033 /* Return value is rt. */
24034 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
24036 case OPC_PRECR_OB_QH
:
24037 case OPC_PRECRQ_OB_QH
:
24038 case OPC_PRECRQ_PW_L
:
24039 case OPC_PRECRQ_QH_PW
:
24040 case OPC_PRECRQ_RS_QH_PW
:
24041 case OPC_PRECRQU_S_OB_QH
:
24042 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
24044 case OPC_CMPU_EQ_OB
:
24045 case OPC_CMPU_LT_OB
:
24046 case OPC_CMPU_LE_OB
:
24047 case OPC_CMP_EQ_QH
:
24048 case OPC_CMP_LT_QH
:
24049 case OPC_CMP_LE_QH
:
24050 case OPC_CMP_EQ_PW
:
24051 case OPC_CMP_LT_PW
:
24052 case OPC_CMP_LE_PW
:
24053 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24055 case OPC_CMPGDU_EQ_OB
:
24056 case OPC_CMPGDU_LT_OB
:
24057 case OPC_CMPGDU_LE_OB
:
24058 case OPC_CMPGU_EQ_OB
:
24059 case OPC_CMPGU_LT_OB
:
24060 case OPC_CMPGU_LE_OB
:
24061 case OPC_PACKRL_PW
:
24065 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
24067 default: /* Invalid */
24068 MIPS_INVAL("MASK CMPU_EQ.OB");
24069 generate_exception_end(ctx
, EXCP_RI
);
24073 case OPC_DAPPEND_DSP
:
24074 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
24076 case OPC_DEXTR_W_DSP
:
24077 op2
= MASK_DEXTR_W(ctx
->opcode
);
24084 case OPC_DEXTR_R_L
:
24085 case OPC_DEXTR_RS_L
:
24087 case OPC_DEXTR_R_W
:
24088 case OPC_DEXTR_RS_W
:
24089 case OPC_DEXTR_S_H
:
24091 case OPC_DEXTRV_R_L
:
24092 case OPC_DEXTRV_RS_L
:
24093 case OPC_DEXTRV_S_H
:
24095 case OPC_DEXTRV_R_W
:
24096 case OPC_DEXTRV_RS_W
:
24097 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
24102 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24104 default: /* Invalid */
24105 MIPS_INVAL("MASK EXTR.W");
24106 generate_exception_end(ctx
, EXCP_RI
);
24110 case OPC_DPAQ_W_QH_DSP
:
24111 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
24113 case OPC_DPAU_H_OBL
:
24114 case OPC_DPAU_H_OBR
:
24115 case OPC_DPSU_H_OBL
:
24116 case OPC_DPSU_H_OBR
:
24118 case OPC_DPAQ_S_W_QH
:
24120 case OPC_DPSQ_S_W_QH
:
24121 case OPC_MULSAQ_S_W_QH
:
24122 case OPC_DPAQ_SA_L_PW
:
24123 case OPC_DPSQ_SA_L_PW
:
24124 case OPC_MULSAQ_S_L_PW
:
24125 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24127 case OPC_MAQ_S_W_QHLL
:
24128 case OPC_MAQ_S_W_QHLR
:
24129 case OPC_MAQ_S_W_QHRL
:
24130 case OPC_MAQ_S_W_QHRR
:
24131 case OPC_MAQ_SA_W_QHLL
:
24132 case OPC_MAQ_SA_W_QHLR
:
24133 case OPC_MAQ_SA_W_QHRL
:
24134 case OPC_MAQ_SA_W_QHRR
:
24135 case OPC_MAQ_S_L_PWL
:
24136 case OPC_MAQ_S_L_PWR
:
24141 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
24143 default: /* Invalid */
24144 MIPS_INVAL("MASK DPAQ.W.QH");
24145 generate_exception_end(ctx
, EXCP_RI
);
24149 case OPC_DINSV_DSP
:
24150 op2
= MASK_INSV(ctx
->opcode
);
24161 t0
= tcg_temp_new();
24162 t1
= tcg_temp_new();
24164 gen_load_gpr(t0
, rt
);
24165 gen_load_gpr(t1
, rs
);
24167 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
24173 default: /* Invalid */
24174 MIPS_INVAL("MASK DINSV");
24175 generate_exception_end(ctx
, EXCP_RI
);
24179 case OPC_SHLL_OB_DSP
:
24180 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
24183 default: /* Invalid */
24184 MIPS_INVAL("special3_legacy");
24185 generate_exception_end(ctx
, EXCP_RI
);
24190 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
24192 int rs
, rt
, rd
, sa
;
24196 rs
= (ctx
->opcode
>> 21) & 0x1f;
24197 rt
= (ctx
->opcode
>> 16) & 0x1f;
24198 rd
= (ctx
->opcode
>> 11) & 0x1f;
24199 sa
= (ctx
->opcode
>> 6) & 0x1f;
24200 imm
= sextract32(ctx
->opcode
, 7, 9);
24202 op1
= MASK_SPECIAL3(ctx
->opcode
);
24205 * EVA loads and stores overlap Loongson 2E instructions decoded by
24206 * decode_opc_special3_legacy(), so be careful to allow their decoding when
24213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24221 check_cp0_enabled(ctx
);
24222 gen_ld(ctx
, op1
, rt
, rs
, imm
);
24226 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24231 check_cp0_enabled(ctx
);
24232 gen_st(ctx
, op1
, rt
, rs
, imm
);
24235 check_cp0_enabled(ctx
);
24236 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
24239 check_cp0_enabled(ctx
);
24240 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24241 gen_cache_operation(ctx
, rt
, rs
, imm
);
24243 /* Treat as NOP. */
24246 check_cp0_enabled(ctx
);
24247 /* Treat as NOP. */
24255 check_insn(ctx
, ISA_MIPS32R2
);
24256 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24259 op2
= MASK_BSHFL(ctx
->opcode
);
24262 case OPC_ALIGN_END
:
24264 check_insn(ctx
, ISA_MIPS32R6
);
24265 decode_opc_special3_r6(env
, ctx
);
24268 check_insn(ctx
, ISA_MIPS32R2
);
24269 gen_bshfl(ctx
, op2
, rt
, rd
);
24273 #if defined(TARGET_MIPS64)
24280 check_insn(ctx
, ISA_MIPS64R2
);
24281 check_mips_64(ctx
);
24282 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
24285 op2
= MASK_DBSHFL(ctx
->opcode
);
24288 case OPC_DALIGN_END
:
24290 check_insn(ctx
, ISA_MIPS32R6
);
24291 decode_opc_special3_r6(env
, ctx
);
24294 check_insn(ctx
, ISA_MIPS64R2
);
24295 check_mips_64(ctx
);
24296 op2
= MASK_DBSHFL(ctx
->opcode
);
24297 gen_bshfl(ctx
, op2
, rt
, rd
);
24303 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
24308 TCGv t0
= tcg_temp_new();
24309 TCGv t1
= tcg_temp_new();
24311 gen_load_gpr(t0
, rt
);
24312 gen_load_gpr(t1
, rs
);
24313 gen_helper_fork(t0
, t1
);
24321 TCGv t0
= tcg_temp_new();
24323 gen_load_gpr(t0
, rs
);
24324 gen_helper_yield(t0
, cpu_env
, t0
);
24325 gen_store_gpr(t0
, rd
);
24330 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24331 decode_opc_special3_r6(env
, ctx
);
24333 decode_opc_special3_legacy(env
, ctx
);
24338 /* MIPS SIMD Architecture (MSA) */
24339 static inline int check_msa_access(DisasContext
*ctx
)
24341 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
24342 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
24343 generate_exception_end(ctx
, EXCP_RI
);
24347 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
24348 if (ctx
->insn_flags
& ASE_MSA
) {
24349 generate_exception_end(ctx
, EXCP_MSADIS
);
24352 generate_exception_end(ctx
, EXCP_RI
);
24359 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
24361 /* generates tcg ops to check if any element is 0 */
24362 /* Note this function only works with MSA_WRLEN = 128 */
24363 uint64_t eval_zero_or_big
= 0;
24364 uint64_t eval_big
= 0;
24365 TCGv_i64 t0
= tcg_temp_new_i64();
24366 TCGv_i64 t1
= tcg_temp_new_i64();
24369 eval_zero_or_big
= 0x0101010101010101ULL
;
24370 eval_big
= 0x8080808080808080ULL
;
24373 eval_zero_or_big
= 0x0001000100010001ULL
;
24374 eval_big
= 0x8000800080008000ULL
;
24377 eval_zero_or_big
= 0x0000000100000001ULL
;
24378 eval_big
= 0x8000000080000000ULL
;
24381 eval_zero_or_big
= 0x0000000000000001ULL
;
24382 eval_big
= 0x8000000000000000ULL
;
24385 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
24386 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
24387 tcg_gen_andi_i64(t0
, t0
, eval_big
);
24388 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
24389 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
24390 tcg_gen_andi_i64(t1
, t1
, eval_big
);
24391 tcg_gen_or_i64(t0
, t0
, t1
);
24392 /* if all bits are zero then all elements are not zero */
24393 /* if some bit is non-zero then some element is zero */
24394 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
24395 tcg_gen_trunc_i64_tl(tresult
, t0
);
24396 tcg_temp_free_i64(t0
);
24397 tcg_temp_free_i64(t1
);
24400 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
24402 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
24403 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24404 int64_t s16
= (int16_t)ctx
->opcode
;
24406 check_msa_access(ctx
);
24408 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
24409 generate_exception_end(ctx
, EXCP_RI
);
24416 TCGv_i64 t0
= tcg_temp_new_i64();
24417 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
24418 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
24419 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
24420 tcg_gen_trunc_i64_tl(bcond
, t0
);
24421 tcg_temp_free_i64(t0
);
24428 gen_check_zero_element(bcond
, df
, wt
);
24434 gen_check_zero_element(bcond
, df
, wt
);
24435 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
24439 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
24441 ctx
->hflags
|= MIPS_HFLAG_BC
;
24442 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
24445 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
24447 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
24448 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
24449 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24450 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24452 TCGv_i32 twd
= tcg_const_i32(wd
);
24453 TCGv_i32 tws
= tcg_const_i32(ws
);
24454 TCGv_i32 ti8
= tcg_const_i32(i8
);
24456 switch (MASK_MSA_I8(ctx
->opcode
)) {
24458 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
24461 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
24464 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
24467 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
24470 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
24473 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
24476 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
24482 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
24483 if (df
== DF_DOUBLE
) {
24484 generate_exception_end(ctx
, EXCP_RI
);
24486 TCGv_i32 tdf
= tcg_const_i32(df
);
24487 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
24488 tcg_temp_free_i32(tdf
);
24493 MIPS_INVAL("MSA instruction");
24494 generate_exception_end(ctx
, EXCP_RI
);
24498 tcg_temp_free_i32(twd
);
24499 tcg_temp_free_i32(tws
);
24500 tcg_temp_free_i32(ti8
);
24503 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
24505 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
24506 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
24507 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
24508 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
24509 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24510 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24512 TCGv_i32 tdf
= tcg_const_i32(df
);
24513 TCGv_i32 twd
= tcg_const_i32(wd
);
24514 TCGv_i32 tws
= tcg_const_i32(ws
);
24515 TCGv_i32 timm
= tcg_temp_new_i32();
24516 tcg_gen_movi_i32(timm
, u5
);
24518 switch (MASK_MSA_I5(ctx
->opcode
)) {
24520 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
24523 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
24525 case OPC_MAXI_S_df
:
24526 tcg_gen_movi_i32(timm
, s5
);
24527 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
24529 case OPC_MAXI_U_df
:
24530 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
24532 case OPC_MINI_S_df
:
24533 tcg_gen_movi_i32(timm
, s5
);
24534 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
24536 case OPC_MINI_U_df
:
24537 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
24540 tcg_gen_movi_i32(timm
, s5
);
24541 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
24543 case OPC_CLTI_S_df
:
24544 tcg_gen_movi_i32(timm
, s5
);
24545 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
24547 case OPC_CLTI_U_df
:
24548 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
24550 case OPC_CLEI_S_df
:
24551 tcg_gen_movi_i32(timm
, s5
);
24552 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
24554 case OPC_CLEI_U_df
:
24555 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
24559 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
24560 tcg_gen_movi_i32(timm
, s10
);
24561 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
24565 MIPS_INVAL("MSA instruction");
24566 generate_exception_end(ctx
, EXCP_RI
);
24570 tcg_temp_free_i32(tdf
);
24571 tcg_temp_free_i32(twd
);
24572 tcg_temp_free_i32(tws
);
24573 tcg_temp_free_i32(timm
);
24576 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
24578 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
24579 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
24580 uint32_t df
= 0, m
= 0;
24581 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24582 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24589 if ((dfm
& 0x40) == 0x00) {
24592 } else if ((dfm
& 0x60) == 0x40) {
24595 } else if ((dfm
& 0x70) == 0x60) {
24598 } else if ((dfm
& 0x78) == 0x70) {
24602 generate_exception_end(ctx
, EXCP_RI
);
24606 tdf
= tcg_const_i32(df
);
24607 tm
= tcg_const_i32(m
);
24608 twd
= tcg_const_i32(wd
);
24609 tws
= tcg_const_i32(ws
);
24611 switch (MASK_MSA_BIT(ctx
->opcode
)) {
24613 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
24616 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
24619 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
24622 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
24625 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
24628 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
24630 case OPC_BINSLI_df
:
24631 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
24633 case OPC_BINSRI_df
:
24634 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
24637 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
24640 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
24643 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
24646 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
24649 MIPS_INVAL("MSA instruction");
24650 generate_exception_end(ctx
, EXCP_RI
);
24654 tcg_temp_free_i32(tdf
);
24655 tcg_temp_free_i32(tm
);
24656 tcg_temp_free_i32(twd
);
24657 tcg_temp_free_i32(tws
);
24660 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
24662 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
24663 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
24664 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24665 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24666 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24668 TCGv_i32 tdf
= tcg_const_i32(df
);
24669 TCGv_i32 twd
= tcg_const_i32(wd
);
24670 TCGv_i32 tws
= tcg_const_i32(ws
);
24671 TCGv_i32 twt
= tcg_const_i32(wt
);
24673 switch (MASK_MSA_3R(ctx
->opcode
)) {
24675 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
24678 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24681 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
24684 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24686 case OPC_SUBS_S_df
:
24687 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24690 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24693 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
24696 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
24699 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
24702 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24704 case OPC_ADDS_A_df
:
24705 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24707 case OPC_SUBS_U_df
:
24708 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24711 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24714 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
24717 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
24720 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
24723 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24726 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24728 case OPC_ADDS_S_df
:
24729 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24731 case OPC_SUBSUS_U_df
:
24732 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24735 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
24738 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
24741 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
24744 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
24747 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24750 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24752 case OPC_ADDS_U_df
:
24753 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24755 case OPC_SUBSUU_S_df
:
24756 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24759 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
24762 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
24765 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24768 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24771 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24773 case OPC_ASUB_S_df
:
24774 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24777 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24780 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
24783 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
24786 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24789 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24792 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24794 case OPC_ASUB_U_df
:
24795 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24798 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24801 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
24804 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
24807 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24809 case OPC_AVER_S_df
:
24810 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24813 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24816 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
24819 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
24822 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24824 case OPC_AVER_U_df
:
24825 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24828 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24831 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
24834 case OPC_DOTP_S_df
:
24835 case OPC_DOTP_U_df
:
24836 case OPC_DPADD_S_df
:
24837 case OPC_DPADD_U_df
:
24838 case OPC_DPSUB_S_df
:
24839 case OPC_HADD_S_df
:
24840 case OPC_DPSUB_U_df
:
24841 case OPC_HADD_U_df
:
24842 case OPC_HSUB_S_df
:
24843 case OPC_HSUB_U_df
:
24844 if (df
== DF_BYTE
) {
24845 generate_exception_end(ctx
, EXCP_RI
);
24848 switch (MASK_MSA_3R(ctx
->opcode
)) {
24849 case OPC_DOTP_S_df
:
24850 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24852 case OPC_DOTP_U_df
:
24853 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24855 case OPC_DPADD_S_df
:
24856 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24858 case OPC_DPADD_U_df
:
24859 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24861 case OPC_DPSUB_S_df
:
24862 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24864 case OPC_HADD_S_df
:
24865 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24867 case OPC_DPSUB_U_df
:
24868 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24870 case OPC_HADD_U_df
:
24871 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24873 case OPC_HSUB_S_df
:
24874 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
24876 case OPC_HSUB_U_df
:
24877 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
24882 MIPS_INVAL("MSA instruction");
24883 generate_exception_end(ctx
, EXCP_RI
);
24886 tcg_temp_free_i32(twd
);
24887 tcg_temp_free_i32(tws
);
24888 tcg_temp_free_i32(twt
);
24889 tcg_temp_free_i32(tdf
);
24892 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
24894 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
24895 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
24896 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
24897 TCGv telm
= tcg_temp_new();
24898 TCGv_i32 tsr
= tcg_const_i32(source
);
24899 TCGv_i32 tdt
= tcg_const_i32(dest
);
24901 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
24903 gen_load_gpr(telm
, source
);
24904 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
24907 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
24908 gen_store_gpr(telm
, dest
);
24911 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
24914 MIPS_INVAL("MSA instruction");
24915 generate_exception_end(ctx
, EXCP_RI
);
24919 tcg_temp_free(telm
);
24920 tcg_temp_free_i32(tdt
);
24921 tcg_temp_free_i32(tsr
);
24924 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
24927 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
24928 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24929 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24931 TCGv_i32 tws
= tcg_const_i32(ws
);
24932 TCGv_i32 twd
= tcg_const_i32(wd
);
24933 TCGv_i32 tn
= tcg_const_i32(n
);
24934 TCGv_i32 tdf
= tcg_const_i32(df
);
24936 switch (MASK_MSA_ELM(ctx
->opcode
)) {
24938 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
24940 case OPC_SPLATI_df
:
24941 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
24944 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
24946 case OPC_COPY_S_df
:
24947 case OPC_COPY_U_df
:
24948 case OPC_INSERT_df
:
24949 #if !defined(TARGET_MIPS64)
24950 /* Double format valid only for MIPS64 */
24951 if (df
== DF_DOUBLE
) {
24952 generate_exception_end(ctx
, EXCP_RI
);
24956 switch (MASK_MSA_ELM(ctx
->opcode
)) {
24957 case OPC_COPY_S_df
:
24958 if (likely(wd
!= 0)) {
24959 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
24962 case OPC_COPY_U_df
:
24963 if (likely(wd
!= 0)) {
24964 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
24967 case OPC_INSERT_df
:
24968 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
24973 MIPS_INVAL("MSA instruction");
24974 generate_exception_end(ctx
, EXCP_RI
);
24976 tcg_temp_free_i32(twd
);
24977 tcg_temp_free_i32(tws
);
24978 tcg_temp_free_i32(tn
);
24979 tcg_temp_free_i32(tdf
);
24982 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
24984 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
24985 uint32_t df
= 0, n
= 0;
24987 if ((dfn
& 0x30) == 0x00) {
24990 } else if ((dfn
& 0x38) == 0x20) {
24993 } else if ((dfn
& 0x3c) == 0x30) {
24996 } else if ((dfn
& 0x3e) == 0x38) {
24999 } else if (dfn
== 0x3E) {
25000 /* CTCMSA, CFCMSA, MOVE.V */
25001 gen_msa_elm_3e(env
, ctx
);
25004 generate_exception_end(ctx
, EXCP_RI
);
25008 gen_msa_elm_df(env
, ctx
, df
, n
);
25011 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25013 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
25014 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
25015 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25016 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25017 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25019 TCGv_i32 twd
= tcg_const_i32(wd
);
25020 TCGv_i32 tws
= tcg_const_i32(ws
);
25021 TCGv_i32 twt
= tcg_const_i32(wt
);
25022 TCGv_i32 tdf
= tcg_temp_new_i32();
25024 /* adjust df value for floating-point instruction */
25025 tcg_gen_movi_i32(tdf
, df
+ 2);
25027 switch (MASK_MSA_3RF(ctx
->opcode
)) {
25029 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25032 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25035 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25038 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25041 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25044 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25047 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
25050 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25053 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25056 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
25059 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25062 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25065 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
25068 tcg_gen_movi_i32(tdf
, df
+ 1);
25069 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25072 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25075 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
25077 case OPC_MADD_Q_df
:
25078 tcg_gen_movi_i32(tdf
, df
+ 1);
25079 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25082 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25084 case OPC_MSUB_Q_df
:
25085 tcg_gen_movi_i32(tdf
, df
+ 1);
25086 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25089 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25092 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
25095 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
25098 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
25101 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
25104 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
25107 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25110 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25113 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
25116 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
25119 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
25122 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
25125 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
25127 case OPC_MULR_Q_df
:
25128 tcg_gen_movi_i32(tdf
, df
+ 1);
25129 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25132 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
25134 case OPC_FMIN_A_df
:
25135 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25137 case OPC_MADDR_Q_df
:
25138 tcg_gen_movi_i32(tdf
, df
+ 1);
25139 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25142 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
25145 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
25147 case OPC_MSUBR_Q_df
:
25148 tcg_gen_movi_i32(tdf
, df
+ 1);
25149 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
25152 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
25154 case OPC_FMAX_A_df
:
25155 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
25158 MIPS_INVAL("MSA instruction");
25159 generate_exception_end(ctx
, EXCP_RI
);
25163 tcg_temp_free_i32(twd
);
25164 tcg_temp_free_i32(tws
);
25165 tcg_temp_free_i32(twt
);
25166 tcg_temp_free_i32(tdf
);
25169 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
25171 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25172 (op & (0x7 << 18)))
25173 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25174 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25175 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25176 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
25177 TCGv_i32 twd
= tcg_const_i32(wd
);
25178 TCGv_i32 tws
= tcg_const_i32(ws
);
25179 TCGv_i32 twt
= tcg_const_i32(wt
);
25180 TCGv_i32 tdf
= tcg_const_i32(df
);
25182 switch (MASK_MSA_2R(ctx
->opcode
)) {
25184 #if !defined(TARGET_MIPS64)
25185 /* Double format valid only for MIPS64 */
25186 if (df
== DF_DOUBLE
) {
25187 generate_exception_end(ctx
, EXCP_RI
);
25191 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
25194 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
25197 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
25200 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
25203 MIPS_INVAL("MSA instruction");
25204 generate_exception_end(ctx
, EXCP_RI
);
25208 tcg_temp_free_i32(twd
);
25209 tcg_temp_free_i32(tws
);
25210 tcg_temp_free_i32(twt
);
25211 tcg_temp_free_i32(tdf
);
25214 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
25216 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
25217 (op & (0xf << 17)))
25218 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25219 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25220 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25221 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
25222 TCGv_i32 twd
= tcg_const_i32(wd
);
25223 TCGv_i32 tws
= tcg_const_i32(ws
);
25224 TCGv_i32 twt
= tcg_const_i32(wt
);
25225 /* adjust df value for floating-point instruction */
25226 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
25228 switch (MASK_MSA_2RF(ctx
->opcode
)) {
25229 case OPC_FCLASS_df
:
25230 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
25232 case OPC_FTRUNC_S_df
:
25233 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
25235 case OPC_FTRUNC_U_df
:
25236 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
25239 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
25241 case OPC_FRSQRT_df
:
25242 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
25245 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
25248 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
25251 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
25253 case OPC_FEXUPL_df
:
25254 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
25256 case OPC_FEXUPR_df
:
25257 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
25260 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
25263 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
25265 case OPC_FTINT_S_df
:
25266 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
25268 case OPC_FTINT_U_df
:
25269 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
25271 case OPC_FFINT_S_df
:
25272 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
25274 case OPC_FFINT_U_df
:
25275 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
25279 tcg_temp_free_i32(twd
);
25280 tcg_temp_free_i32(tws
);
25281 tcg_temp_free_i32(twt
);
25282 tcg_temp_free_i32(tdf
);
25285 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
25287 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
25288 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
25289 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
25290 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25291 TCGv_i32 twd
= tcg_const_i32(wd
);
25292 TCGv_i32 tws
= tcg_const_i32(ws
);
25293 TCGv_i32 twt
= tcg_const_i32(wt
);
25295 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25297 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
25300 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
25303 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
25306 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
25309 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
25312 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
25315 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
25318 MIPS_INVAL("MSA instruction");
25319 generate_exception_end(ctx
, EXCP_RI
);
25323 tcg_temp_free_i32(twd
);
25324 tcg_temp_free_i32(tws
);
25325 tcg_temp_free_i32(twt
);
25328 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
25330 switch (MASK_MSA_VEC(ctx
->opcode
)) {
25338 gen_msa_vec_v(env
, ctx
);
25341 gen_msa_2r(env
, ctx
);
25344 gen_msa_2rf(env
, ctx
);
25347 MIPS_INVAL("MSA instruction");
25348 generate_exception_end(ctx
, EXCP_RI
);
25353 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
25355 uint32_t opcode
= ctx
->opcode
;
25356 check_insn(ctx
, ASE_MSA
);
25357 check_msa_access(ctx
);
25359 switch (MASK_MSA_MINOR(opcode
)) {
25360 case OPC_MSA_I8_00
:
25361 case OPC_MSA_I8_01
:
25362 case OPC_MSA_I8_02
:
25363 gen_msa_i8(env
, ctx
);
25365 case OPC_MSA_I5_06
:
25366 case OPC_MSA_I5_07
:
25367 gen_msa_i5(env
, ctx
);
25369 case OPC_MSA_BIT_09
:
25370 case OPC_MSA_BIT_0A
:
25371 gen_msa_bit(env
, ctx
);
25373 case OPC_MSA_3R_0D
:
25374 case OPC_MSA_3R_0E
:
25375 case OPC_MSA_3R_0F
:
25376 case OPC_MSA_3R_10
:
25377 case OPC_MSA_3R_11
:
25378 case OPC_MSA_3R_12
:
25379 case OPC_MSA_3R_13
:
25380 case OPC_MSA_3R_14
:
25381 case OPC_MSA_3R_15
:
25382 gen_msa_3r(env
, ctx
);
25385 gen_msa_elm(env
, ctx
);
25387 case OPC_MSA_3RF_1A
:
25388 case OPC_MSA_3RF_1B
:
25389 case OPC_MSA_3RF_1C
:
25390 gen_msa_3rf(env
, ctx
);
25393 gen_msa_vec(env
, ctx
);
25404 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
25405 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
25406 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
25407 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
25409 TCGv_i32 twd
= tcg_const_i32(wd
);
25410 TCGv taddr
= tcg_temp_new();
25411 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
25413 switch (MASK_MSA_MINOR(opcode
)) {
25415 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
25418 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
25421 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
25424 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
25427 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
25430 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
25433 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
25436 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
25440 tcg_temp_free_i32(twd
);
25441 tcg_temp_free(taddr
);
25445 MIPS_INVAL("MSA instruction");
25446 generate_exception_end(ctx
, EXCP_RI
);
25452 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
25455 int rs
, rt
, rd
, sa
;
25459 /* make sure instructions are on a word boundary */
25460 if (ctx
->base
.pc_next
& 0x3) {
25461 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
25462 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
25466 /* Handle blikely not taken case */
25467 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
25468 TCGLabel
*l1
= gen_new_label();
25470 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
25471 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
25472 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
25476 op
= MASK_OP_MAJOR(ctx
->opcode
);
25477 rs
= (ctx
->opcode
>> 21) & 0x1f;
25478 rt
= (ctx
->opcode
>> 16) & 0x1f;
25479 rd
= (ctx
->opcode
>> 11) & 0x1f;
25480 sa
= (ctx
->opcode
>> 6) & 0x1f;
25481 imm
= (int16_t)ctx
->opcode
;
25484 decode_opc_special(env
, ctx
);
25487 decode_opc_special2_legacy(env
, ctx
);
25490 decode_opc_special3(env
, ctx
);
25493 op1
= MASK_REGIMM(ctx
->opcode
);
25495 case OPC_BLTZL
: /* REGIMM branches */
25499 check_insn(ctx
, ISA_MIPS2
);
25500 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25504 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
25508 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25510 /* OPC_NAL, OPC_BAL */
25511 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
25513 generate_exception_end(ctx
, EXCP_RI
);
25516 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
25519 case OPC_TGEI
: /* REGIMM traps */
25526 check_insn(ctx
, ISA_MIPS2
);
25527 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25528 gen_trap(ctx
, op1
, rs
, -1, imm
);
25531 check_insn(ctx
, ISA_MIPS32R6
);
25532 generate_exception_end(ctx
, EXCP_RI
);
25535 check_insn(ctx
, ISA_MIPS32R2
);
25536 /* Break the TB to be able to sync copied instructions
25538 ctx
->base
.is_jmp
= DISAS_STOP
;
25540 case OPC_BPOSGE32
: /* MIPS DSP branch */
25541 #if defined(TARGET_MIPS64)
25545 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
25547 #if defined(TARGET_MIPS64)
25549 check_insn(ctx
, ISA_MIPS32R6
);
25550 check_mips_64(ctx
);
25552 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
25556 check_insn(ctx
, ISA_MIPS32R6
);
25557 check_mips_64(ctx
);
25559 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
25563 default: /* Invalid */
25564 MIPS_INVAL("regimm");
25565 generate_exception_end(ctx
, EXCP_RI
);
25570 check_cp0_enabled(ctx
);
25571 op1
= MASK_CP0(ctx
->opcode
);
25579 #if defined(TARGET_MIPS64)
25583 #ifndef CONFIG_USER_ONLY
25584 gen_cp0(env
, ctx
, op1
, rt
, rd
);
25585 #endif /* !CONFIG_USER_ONLY */
25603 #ifndef CONFIG_USER_ONLY
25604 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
25605 #endif /* !CONFIG_USER_ONLY */
25608 #ifndef CONFIG_USER_ONLY
25611 TCGv t0
= tcg_temp_new();
25613 op2
= MASK_MFMC0(ctx
->opcode
);
25617 gen_helper_dmt(t0
);
25618 gen_store_gpr(t0
, rt
);
25622 gen_helper_emt(t0
);
25623 gen_store_gpr(t0
, rt
);
25627 gen_helper_dvpe(t0
, cpu_env
);
25628 gen_store_gpr(t0
, rt
);
25632 gen_helper_evpe(t0
, cpu_env
);
25633 gen_store_gpr(t0
, rt
);
25636 check_insn(ctx
, ISA_MIPS32R6
);
25638 gen_helper_dvp(t0
, cpu_env
);
25639 gen_store_gpr(t0
, rt
);
25643 check_insn(ctx
, ISA_MIPS32R6
);
25645 gen_helper_evp(t0
, cpu_env
);
25646 gen_store_gpr(t0
, rt
);
25650 check_insn(ctx
, ISA_MIPS32R2
);
25651 save_cpu_state(ctx
, 1);
25652 gen_helper_di(t0
, cpu_env
);
25653 gen_store_gpr(t0
, rt
);
25654 /* Stop translation as we may have switched
25655 the execution mode. */
25656 ctx
->base
.is_jmp
= DISAS_STOP
;
25659 check_insn(ctx
, ISA_MIPS32R2
);
25660 save_cpu_state(ctx
, 1);
25661 gen_helper_ei(t0
, cpu_env
);
25662 gen_store_gpr(t0
, rt
);
25663 /* DISAS_STOP isn't sufficient, we need to ensure we break
25664 out of translated code to check for pending interrupts */
25665 gen_save_pc(ctx
->base
.pc_next
+ 4);
25666 ctx
->base
.is_jmp
= DISAS_EXIT
;
25668 default: /* Invalid */
25669 MIPS_INVAL("mfmc0");
25670 generate_exception_end(ctx
, EXCP_RI
);
25675 #endif /* !CONFIG_USER_ONLY */
25678 check_insn(ctx
, ISA_MIPS32R2
);
25679 gen_load_srsgpr(rt
, rd
);
25682 check_insn(ctx
, ISA_MIPS32R2
);
25683 gen_store_srsgpr(rt
, rd
);
25687 generate_exception_end(ctx
, EXCP_RI
);
25691 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
25692 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25693 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
25694 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25697 /* Arithmetic with immediate opcode */
25698 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
25702 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
25704 case OPC_SLTI
: /* Set on less than with immediate opcode */
25706 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
25708 case OPC_ANDI
: /* Arithmetic with immediate opcode */
25709 case OPC_LUI
: /* OPC_AUI */
25712 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
25714 case OPC_J
: /* Jump */
25716 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
25717 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
25720 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
25721 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25723 generate_exception_end(ctx
, EXCP_RI
);
25726 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
25727 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25730 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
25733 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
25734 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25736 generate_exception_end(ctx
, EXCP_RI
);
25739 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
25740 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25743 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
25746 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
25749 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
25751 check_insn(ctx
, ISA_MIPS32R6
);
25752 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
25753 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25756 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
25759 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
25761 check_insn(ctx
, ISA_MIPS32R6
);
25762 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
25763 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
25768 check_insn(ctx
, ISA_MIPS2
);
25769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25773 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
25775 case OPC_LL
: /* Load and stores */
25776 check_insn(ctx
, ISA_MIPS2
);
25780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25788 gen_ld(ctx
, op
, rt
, rs
, imm
);
25792 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25797 gen_st(ctx
, op
, rt
, rs
, imm
);
25800 check_insn(ctx
, ISA_MIPS2
);
25801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25802 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
25805 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25806 check_cp0_enabled(ctx
);
25807 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
25808 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
25809 gen_cache_operation(ctx
, rt
, rs
, imm
);
25811 /* Treat as NOP. */
25814 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25815 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
25816 /* Treat as NOP. */
25819 /* Floating point (COP1). */
25824 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
25828 op1
= MASK_CP1(ctx
->opcode
);
25833 check_cp1_enabled(ctx
);
25834 check_insn(ctx
, ISA_MIPS32R2
);
25840 check_cp1_enabled(ctx
);
25841 gen_cp1(ctx
, op1
, rt
, rd
);
25843 #if defined(TARGET_MIPS64)
25846 check_cp1_enabled(ctx
);
25847 check_insn(ctx
, ISA_MIPS3
);
25848 check_mips_64(ctx
);
25849 gen_cp1(ctx
, op1
, rt
, rd
);
25852 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
25853 check_cp1_enabled(ctx
);
25854 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25856 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
25861 check_insn(ctx
, ASE_MIPS3D
);
25862 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
25863 (rt
>> 2) & 0x7, imm
<< 2);
25867 check_cp1_enabled(ctx
);
25868 check_insn(ctx
, ISA_MIPS32R6
);
25869 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
25873 check_cp1_enabled(ctx
);
25874 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25876 check_insn(ctx
, ASE_MIPS3D
);
25879 check_cp1_enabled(ctx
);
25880 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
25881 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
25882 (rt
>> 2) & 0x7, imm
<< 2);
25889 check_cp1_enabled(ctx
);
25890 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
25896 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
25897 check_cp1_enabled(ctx
);
25898 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25900 case R6_OPC_CMP_AF_S
:
25901 case R6_OPC_CMP_UN_S
:
25902 case R6_OPC_CMP_EQ_S
:
25903 case R6_OPC_CMP_UEQ_S
:
25904 case R6_OPC_CMP_LT_S
:
25905 case R6_OPC_CMP_ULT_S
:
25906 case R6_OPC_CMP_LE_S
:
25907 case R6_OPC_CMP_ULE_S
:
25908 case R6_OPC_CMP_SAF_S
:
25909 case R6_OPC_CMP_SUN_S
:
25910 case R6_OPC_CMP_SEQ_S
:
25911 case R6_OPC_CMP_SEUQ_S
:
25912 case R6_OPC_CMP_SLT_S
:
25913 case R6_OPC_CMP_SULT_S
:
25914 case R6_OPC_CMP_SLE_S
:
25915 case R6_OPC_CMP_SULE_S
:
25916 case R6_OPC_CMP_OR_S
:
25917 case R6_OPC_CMP_UNE_S
:
25918 case R6_OPC_CMP_NE_S
:
25919 case R6_OPC_CMP_SOR_S
:
25920 case R6_OPC_CMP_SUNE_S
:
25921 case R6_OPC_CMP_SNE_S
:
25922 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
25924 case R6_OPC_CMP_AF_D
:
25925 case R6_OPC_CMP_UN_D
:
25926 case R6_OPC_CMP_EQ_D
:
25927 case R6_OPC_CMP_UEQ_D
:
25928 case R6_OPC_CMP_LT_D
:
25929 case R6_OPC_CMP_ULT_D
:
25930 case R6_OPC_CMP_LE_D
:
25931 case R6_OPC_CMP_ULE_D
:
25932 case R6_OPC_CMP_SAF_D
:
25933 case R6_OPC_CMP_SUN_D
:
25934 case R6_OPC_CMP_SEQ_D
:
25935 case R6_OPC_CMP_SEUQ_D
:
25936 case R6_OPC_CMP_SLT_D
:
25937 case R6_OPC_CMP_SULT_D
:
25938 case R6_OPC_CMP_SLE_D
:
25939 case R6_OPC_CMP_SULE_D
:
25940 case R6_OPC_CMP_OR_D
:
25941 case R6_OPC_CMP_UNE_D
:
25942 case R6_OPC_CMP_NE_D
:
25943 case R6_OPC_CMP_SOR_D
:
25944 case R6_OPC_CMP_SUNE_D
:
25945 case R6_OPC_CMP_SNE_D
:
25946 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
25949 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
25950 rt
, rd
, sa
, (imm
>> 8) & 0x7);
25955 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
25970 check_insn(ctx
, ASE_MSA
);
25971 gen_msa_branch(env
, ctx
, op1
);
25975 generate_exception_end(ctx
, EXCP_RI
);
25980 /* Compact branches [R6] and COP2 [non-R6] */
25981 case OPC_BC
: /* OPC_LWC2 */
25982 case OPC_BALC
: /* OPC_SWC2 */
25983 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25984 /* OPC_BC, OPC_BALC */
25985 gen_compute_compact_branch(ctx
, op
, 0, 0,
25986 sextract32(ctx
->opcode
<< 2, 0, 28));
25988 /* OPC_LWC2, OPC_SWC2 */
25989 /* COP2: Not implemented. */
25990 generate_exception_err(ctx
, EXCP_CpU
, 2);
25993 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
25994 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
25995 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
25997 /* OPC_BEQZC, OPC_BNEZC */
25998 gen_compute_compact_branch(ctx
, op
, rs
, 0,
25999 sextract32(ctx
->opcode
<< 2, 0, 23));
26001 /* OPC_JIC, OPC_JIALC */
26002 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
26005 /* OPC_LWC2, OPC_SWC2 */
26006 /* COP2: Not implemented. */
26007 generate_exception_err(ctx
, EXCP_CpU
, 2);
26011 check_insn(ctx
, INSN_LOONGSON2F
);
26012 /* Note that these instructions use different fields. */
26013 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
26017 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26018 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
26019 check_cp1_enabled(ctx
);
26020 op1
= MASK_CP3(ctx
->opcode
);
26024 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26030 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26031 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
26034 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26035 /* Treat as NOP. */
26038 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
26052 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
26053 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
26057 generate_exception_end(ctx
, EXCP_RI
);
26061 generate_exception_err(ctx
, EXCP_CpU
, 1);
26065 #if defined(TARGET_MIPS64)
26066 /* MIPS64 opcodes */
26070 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26074 check_insn(ctx
, ISA_MIPS3
);
26075 check_mips_64(ctx
);
26076 gen_ld(ctx
, op
, rt
, rs
, imm
);
26080 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26083 check_insn(ctx
, ISA_MIPS3
);
26084 check_mips_64(ctx
);
26085 gen_st(ctx
, op
, rt
, rs
, imm
);
26088 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26089 check_insn(ctx
, ISA_MIPS3
);
26090 check_mips_64(ctx
);
26091 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
26093 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
26094 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26095 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
26096 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26099 check_insn(ctx
, ISA_MIPS3
);
26100 check_mips_64(ctx
);
26101 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26105 check_insn(ctx
, ISA_MIPS3
);
26106 check_mips_64(ctx
);
26107 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
26110 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
26111 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26112 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
26114 MIPS_INVAL("major opcode");
26115 generate_exception_end(ctx
, EXCP_RI
);
26119 case OPC_DAUI
: /* OPC_JALX */
26120 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
26121 #if defined(TARGET_MIPS64)
26123 check_mips_64(ctx
);
26125 generate_exception(ctx
, EXCP_RI
);
26126 } else if (rt
!= 0) {
26127 TCGv t0
= tcg_temp_new();
26128 gen_load_gpr(t0
, rs
);
26129 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
26133 generate_exception_end(ctx
, EXCP_RI
);
26134 MIPS_INVAL("major opcode");
26138 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
26139 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
26140 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
26143 case OPC_MSA
: /* OPC_MDMX */
26144 /* MDMX: Not implemented. */
26148 check_insn(ctx
, ISA_MIPS32R6
);
26149 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
26151 default: /* Invalid */
26152 MIPS_INVAL("major opcode");
26153 generate_exception_end(ctx
, EXCP_RI
);
26158 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
26160 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26161 CPUMIPSState
*env
= cs
->env_ptr
;
26163 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
26164 ctx
->saved_pc
= -1;
26165 ctx
->insn_flags
= env
->insn_flags
;
26166 ctx
->CP0_Config1
= env
->CP0_Config1
;
26167 ctx
->CP0_Config2
= env
->CP0_Config2
;
26168 ctx
->CP0_Config3
= env
->CP0_Config3
;
26169 ctx
->CP0_Config5
= env
->CP0_Config5
;
26171 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
26172 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
26173 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
26174 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
26175 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
26176 ctx
->PAMask
= env
->PAMask
;
26177 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
26178 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
26179 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
26180 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
26181 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
26182 /* Restore delay slot state from the tb context. */
26183 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
26184 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
26185 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
26186 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
26187 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
26188 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
26189 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
26190 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
26191 restore_cpu_state(env
, ctx
);
26192 #ifdef CONFIG_USER_ONLY
26193 ctx
->mem_idx
= MIPS_HFLAG_UM
;
26195 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
26197 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
26198 MO_UNALN
: MO_ALIGN
;
26200 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
26204 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26208 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
26210 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26212 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
26216 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
26217 const CPUBreakpoint
*bp
)
26219 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26221 save_cpu_state(ctx
, 1);
26222 ctx
->base
.is_jmp
= DISAS_NORETURN
;
26223 gen_helper_raise_exception_debug(cpu_env
);
26224 /* The address covered by the breakpoint must be included in
26225 [tb->pc, tb->pc + tb->size) in order to for it to be
26226 properly cleared -- thus we increment the PC here so that
26227 the logic setting tb->size below does the right thing. */
26228 ctx
->base
.pc_next
+= 4;
26232 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
26234 CPUMIPSState
*env
= cs
->env_ptr
;
26235 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26239 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
26240 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
26241 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26242 insn_bytes
= decode_nanomips_opc(env
, ctx
);
26243 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
26244 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
26246 decode_opc(env
, ctx
);
26247 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
26248 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26249 insn_bytes
= decode_micromips_opc(env
, ctx
);
26250 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
26251 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
26252 insn_bytes
= decode_mips16_opc(env
, ctx
);
26254 generate_exception_end(ctx
, EXCP_RI
);
26255 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
26259 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
26260 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
26261 MIPS_HFLAG_FBNSLOT
))) {
26262 /* force to generate branch as there is neither delay nor
26266 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
26267 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
26268 /* Force to generate branch as microMIPS R6 doesn't restrict
26269 branches in the forbidden slot. */
26274 gen_branch(ctx
, insn_bytes
);
26276 ctx
->base
.pc_next
+= insn_bytes
;
26278 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
26281 /* Execute a branch and its delay slot as a single instruction.
26282 This is what GDB expects and is consistent with what the
26283 hardware does (e.g. if a delay slot instruction faults, the
26284 reported PC is the PC of the branch). */
26285 if (ctx
->base
.singlestep_enabled
&&
26286 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
26287 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26289 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
26290 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
26294 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
26296 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
26298 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
26299 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
26300 gen_helper_raise_exception_debug(cpu_env
);
26302 switch (ctx
->base
.is_jmp
) {
26304 gen_save_pc(ctx
->base
.pc_next
);
26305 tcg_gen_lookup_and_goto_ptr();
26308 case DISAS_TOO_MANY
:
26309 save_cpu_state(ctx
, 0);
26310 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
26313 tcg_gen_exit_tb(NULL
, 0);
26315 case DISAS_NORETURN
:
26318 g_assert_not_reached();
26323 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
26325 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
26326 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
26329 static const TranslatorOps mips_tr_ops
= {
26330 .init_disas_context
= mips_tr_init_disas_context
,
26331 .tb_start
= mips_tr_tb_start
,
26332 .insn_start
= mips_tr_insn_start
,
26333 .breakpoint_check
= mips_tr_breakpoint_check
,
26334 .translate_insn
= mips_tr_translate_insn
,
26335 .tb_stop
= mips_tr_tb_stop
,
26336 .disas_log
= mips_tr_disas_log
,
26339 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
26343 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
26346 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
26350 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
26352 #define printfpr(fp) \
26355 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26356 " fd:%13g fs:%13g psu: %13g\n", \
26357 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
26358 (double)(fp)->fd, \
26359 (double)(fp)->fs[FP_ENDIAN_IDX], \
26360 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
26363 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
26364 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
26365 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
26366 " fd:%13g fs:%13g psu:%13g\n", \
26367 tmp.w[FP_ENDIAN_IDX], tmp.d, \
26369 (double)tmp.fs[FP_ENDIAN_IDX], \
26370 (double)tmp.fs[!FP_ENDIAN_IDX]); \
26375 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
26376 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
26377 get_float_exception_flags(&env
->active_fpu
.fp_status
));
26378 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
26379 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
26380 printfpr(&env
->active_fpu
.fpr
[i
]);
26386 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
26389 MIPSCPU
*cpu
= MIPS_CPU(cs
);
26390 CPUMIPSState
*env
= &cpu
->env
;
26393 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
26394 " LO=0x" TARGET_FMT_lx
" ds %04x "
26395 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
26396 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
26397 env
->hflags
, env
->btarget
, env
->bcond
);
26398 for (i
= 0; i
< 32; i
++) {
26400 cpu_fprintf(f
, "GPR%02d:", i
);
26401 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
26403 cpu_fprintf(f
, "\n");
26406 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
26407 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
26408 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
26410 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
26411 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
26412 env
->CP0_Config2
, env
->CP0_Config3
);
26413 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
26414 env
->CP0_Config4
, env
->CP0_Config5
);
26415 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
26416 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
26420 void mips_tcg_init(void)
26425 for (i
= 1; i
< 32; i
++)
26426 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
26427 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
26430 for (i
= 0; i
< 32; i
++) {
26431 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
26433 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
26434 /* The scalar floating-point unit (FPU) registers are mapped on
26435 * the MSA vector registers. */
26436 fpu_f64
[i
] = msa_wr_d
[i
* 2];
26437 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
26438 msa_wr_d
[i
* 2 + 1] =
26439 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
26442 cpu_PC
= tcg_global_mem_new(cpu_env
,
26443 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
26444 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
26445 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
26446 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
26448 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
26449 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
26452 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
26453 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
26455 bcond
= tcg_global_mem_new(cpu_env
,
26456 offsetof(CPUMIPSState
, bcond
), "bcond");
26457 btarget
= tcg_global_mem_new(cpu_env
,
26458 offsetof(CPUMIPSState
, btarget
), "btarget");
26459 hflags
= tcg_global_mem_new_i32(cpu_env
,
26460 offsetof(CPUMIPSState
, hflags
), "hflags");
26462 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
26463 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
26465 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
26466 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
26470 #include "translate_init.inc.c"
26472 void cpu_mips_realize_env(CPUMIPSState
*env
)
26474 env
->exception_base
= (int32_t)0xBFC00000;
26476 #ifndef CONFIG_USER_ONLY
26477 mmu_init(env
, env
->cpu_model
);
26479 fpu_init(env
, env
->cpu_model
);
26480 mvp_init(env
, env
->cpu_model
);
26483 bool cpu_supports_cps_smp(const char *cpu_type
)
26485 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
26486 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
26489 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
26491 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
26492 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
26495 void cpu_set_exception_base(int vp_index
, target_ulong address
)
26497 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
26498 vp
->env
.exception_base
= address
;
26501 void cpu_state_reset(CPUMIPSState
*env
)
26503 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
26504 CPUState
*cs
= CPU(cpu
);
26506 /* Reset registers to their default values */
26507 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
26508 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
26509 #ifdef TARGET_WORDS_BIGENDIAN
26510 env
->CP0_Config0
|= (1 << CP0C0_BE
);
26512 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
26513 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
26514 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
26515 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
26516 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
26517 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
26518 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
26519 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
26520 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
26521 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
26522 << env
->cpu_model
->CP0_LLAddr_shift
;
26523 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
26524 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
26525 env
->CCRes
= env
->cpu_model
->CCRes
;
26526 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
26527 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
26528 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
26529 env
->current_tc
= 0;
26530 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
26531 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
26532 #if defined(TARGET_MIPS64)
26533 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
26534 env
->SEGMask
|= 3ULL << 62;
26537 env
->PABITS
= env
->cpu_model
->PABITS
;
26538 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
26539 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
26540 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
26541 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
26542 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
26543 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
26544 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
26545 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
26546 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
26547 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
26548 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
26549 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
26550 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
26551 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
26552 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
26553 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
26554 env
->msair
= env
->cpu_model
->MSAIR
;
26555 env
->insn_flags
= env
->cpu_model
->insn_flags
;
26557 #if defined(CONFIG_USER_ONLY)
26558 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
26559 # ifdef TARGET_MIPS64
26560 /* Enable 64-bit register mode. */
26561 env
->CP0_Status
|= (1 << CP0St_PX
);
26563 # ifdef TARGET_ABI_MIPSN64
26564 /* Enable 64-bit address mode. */
26565 env
->CP0_Status
|= (1 << CP0St_UX
);
26567 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
26568 hardware registers. */
26569 env
->CP0_HWREna
|= 0x0000000F;
26570 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
26571 env
->CP0_Status
|= (1 << CP0St_CU1
);
26573 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
26574 env
->CP0_Status
|= (1 << CP0St_MX
);
26576 # if defined(TARGET_MIPS64)
26577 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
26578 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
26579 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
26580 env
->CP0_Status
|= (1 << CP0St_FR
);
26584 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
26585 /* If the exception was raised from a delay slot,
26586 come back to the jump. */
26587 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
26588 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
26590 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
26592 env
->active_tc
.PC
= env
->exception_base
;
26593 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
26594 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
26595 env
->CP0_Wired
= 0;
26596 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
26597 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
26598 if (mips_um_ksegs_enabled()) {
26599 env
->CP0_EBase
|= 0x40000000;
26601 env
->CP0_EBase
|= (int32_t)0x80000000;
26603 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
26604 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
26606 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
26608 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
26609 /* vectored interrupts not implemented, timer on int 7,
26610 no performance counters. */
26611 env
->CP0_IntCtl
= 0xe0000000;
26615 for (i
= 0; i
< 7; i
++) {
26616 env
->CP0_WatchLo
[i
] = 0;
26617 env
->CP0_WatchHi
[i
] = 0x80000000;
26619 env
->CP0_WatchLo
[7] = 0;
26620 env
->CP0_WatchHi
[7] = 0;
26622 /* Count register increments in debug mode, EJTAG version 1 */
26623 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
26625 cpu_mips_store_count(env
, 1);
26627 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
26630 /* Only TC0 on VPE 0 starts as active. */
26631 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
26632 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
26633 env
->tcs
[i
].CP0_TCHalt
= 1;
26635 env
->active_tc
.CP0_TCHalt
= 1;
26638 if (cs
->cpu_index
== 0) {
26639 /* VPE0 starts up enabled. */
26640 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
26641 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
26643 /* TC0 starts up unhalted. */
26645 env
->active_tc
.CP0_TCHalt
= 0;
26646 env
->tcs
[0].CP0_TCHalt
= 0;
26647 /* With thread 0 active. */
26648 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
26649 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
26654 * Configure default legacy segmentation control. We use this regardless of
26655 * whether segmentation control is presented to the guest.
26657 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
26658 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
26659 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
26660 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
26661 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
26662 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
26664 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
26665 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
26666 (3 << CP0SC_C
)) << 16;
26667 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
26668 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
26669 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
26670 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
26671 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
26672 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
26673 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
26674 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
26676 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
26677 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
26678 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
26679 env
->CP0_Status
|= (1 << CP0St_FR
);
26682 if (env
->insn_flags
& ISA_MIPS32R6
) {
26684 env
->CP0_PWSize
= 0x40;
26690 env
->CP0_PWField
= 0x0C30C302;
26697 env
->CP0_PWField
= 0x02;
26700 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
26701 /* microMIPS on reset when Config3.ISA is 3 */
26702 env
->hflags
|= MIPS_HFLAG_M16
;
26706 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
26710 compute_hflags(env
);
26711 restore_fp_status(env
);
26712 restore_pamask(env
);
26713 cs
->exception_index
= EXCP_NONE
;
26715 if (semihosting_get_argc()) {
26716 /* UHI interface can be used to obtain argc and argv */
26717 env
->active_tc
.gpr
[4] = -1;
26721 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
26722 target_ulong
*data
)
26724 env
->active_tc
.PC
= data
[0];
26725 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
26726 env
->hflags
|= data
[1];
26727 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
26728 case MIPS_HFLAG_BR
:
26730 case MIPS_HFLAG_BC
:
26731 case MIPS_HFLAG_BL
:
26733 env
->btarget
= data
[2];