/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
/* Set to 1 to enable extra translation-time disassembly debug output. */
#define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
/* Extract the 6-bit major opcode field (instruction bits 31..26).
 * Argument parenthesized so expression operands (e.g. a | b) mask correctly. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
/* Major opcode plus the top-2 (bits 20..19) / top-5 (bits 20..16) sub-opcode
 * bits; arguments fully parenthesized against operator-precedence surprises. */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
/* Major opcode plus the 6-bit function field (bits 5..0).  Outer parentheses
 * added: without them MASK_SPECIAL(x) == y parses as a | ((x & 0x3F) == y). */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
/* R6 mul/div: SPECIAL function field plus the 11 bits above it (sa/rd). */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & 0x7ff))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
/* VR54xx multiply variants: SPECIAL function field plus the sa field
 * (bits 10..6).  Outer parentheses added for safe use in expressions. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
/* REGIMM: major opcode plus the rt field (bits 20..16).  Outer parentheses
 * added so comparisons against the macro group correctly. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
/* SPECIAL2: major opcode plus the 6-bit function field.  Fully parenthesized. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
/* SPECIAL3: major opcode plus the 6-bit function field.  Fully parenthesized. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL: SPECIAL3 function field plus the sa field (bits 10..6).
 * Outer parentheses added for safe use in expressions. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL: SPECIAL3 function field plus the sa field (bits 10..6). */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP indexed-load class: SPECIAL3 function field plus bits 10..6. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* ADDU.QB class: SPECIAL3 function field plus the 5-bit sub-op (bits 10..6). */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* ADDUH.QB class shares its SPECIAL3 encoding with Loongson-2E MULT.G. */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* ADDUH.QB class: SPECIAL3 function field plus bits 10..6. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* ABSQ_S.PH class: SPECIAL3 function field plus bits 10..6. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* CMPU.EQ.QB class: SPECIAL3 function field plus bits 10..6. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* SHLL.QB class: SPECIAL3 function field plus bits 10..6. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DPA.W.PH class: SPECIAL3 function field plus bits 10..6. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* INSV class: SPECIAL3 function field plus bits 10..6. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* APPEND class: SPECIAL3 function field plus bits 10..6. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* EXTR.W class: SPECIAL3 function field plus bits 10..6. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* ABSQ_S.QH class: SPECIAL3 function field plus bits 10..6. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* ADDU.OB class: SPECIAL3 function field plus bits 10..6. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* CMPU.EQ.OB class: SPECIAL3 function field plus bits 10..6. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DAPPEND class: SPECIAL3 function field plus bits 10..6. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DEXTR.W class: SPECIAL3 function field plus bits 10..6. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* DINSV class: SPECIAL3 function field plus bits 10..6. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* DPAQ.W.QH class: SPECIAL3 function field plus bits 10..6. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
890 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
906 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
907 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
908 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
909 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
910 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
911 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
912 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
913 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
914 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
915 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
916 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
917 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
918 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
919 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
923 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
926 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
927 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
928 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
929 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
930 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
931 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
932 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
933 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
936 /* Coprocessor 0 (with rs == C0) */
937 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
940 OPC_TLBR
= 0x01 | OPC_C0
,
941 OPC_TLBWI
= 0x02 | OPC_C0
,
942 OPC_TLBINV
= 0x03 | OPC_C0
,
943 OPC_TLBINVF
= 0x04 | OPC_C0
,
944 OPC_TLBWR
= 0x06 | OPC_C0
,
945 OPC_TLBP
= 0x08 | OPC_C0
,
946 OPC_RFE
= 0x10 | OPC_C0
,
947 OPC_ERET
= 0x18 | OPC_C0
,
948 OPC_DERET
= 0x1F | OPC_C0
,
949 OPC_WAIT
= 0x20 | OPC_C0
,
952 /* Coprocessor 1 (rs field) */
953 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
955 /* Values for the fmt field in FP instructions */
957 /* 0 - 15 are reserved */
958 FMT_S
= 16, /* single fp */
959 FMT_D
= 17, /* double fp */
960 FMT_E
= 18, /* extended fp */
961 FMT_Q
= 19, /* quad fp */
962 FMT_W
= 20, /* 32-bit fixed */
963 FMT_L
= 21, /* 64-bit fixed */
964 FMT_PS
= 22, /* paired single fp */
965 /* 23 - 31 are reserved */
969 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
970 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
971 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
972 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
973 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
974 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
975 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
976 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
977 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
978 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
979 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
980 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
981 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
982 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
983 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
984 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
985 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
986 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
987 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
988 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
989 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
991 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
992 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
993 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
994 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
995 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
996 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
997 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
998 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1001 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1002 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1005 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1006 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1007 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1008 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1012 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1013 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1017 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1018 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1021 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1024 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1025 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1026 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1027 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1028 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1029 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1030 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1031 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1032 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1033 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1034 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1037 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1040 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1041 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1042 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1043 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1044 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1045 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1046 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1047 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1050 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1051 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1052 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1053 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1054 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1055 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1056 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1059 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1060 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1061 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1062 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1063 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1064 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1065 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1068 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1069 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1070 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1071 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1072 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1073 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1074 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1077 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1078 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1079 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1080 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1081 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1083 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1084 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1085 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1086 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1087 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1088 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1090 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1091 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1092 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1093 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1094 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1095 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1097 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1098 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1099 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1100 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1101 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1102 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1104 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1105 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1106 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1107 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1108 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1109 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1111 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1112 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1113 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1114 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1115 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1116 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1118 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1119 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1120 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1121 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1122 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1123 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1125 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1126 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1127 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1128 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1129 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1130 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1134 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1137 OPC_LWXC1
= 0x00 | OPC_CP3
,
1138 OPC_LDXC1
= 0x01 | OPC_CP3
,
1139 OPC_LUXC1
= 0x05 | OPC_CP3
,
1140 OPC_SWXC1
= 0x08 | OPC_CP3
,
1141 OPC_SDXC1
= 0x09 | OPC_CP3
,
1142 OPC_SUXC1
= 0x0D | OPC_CP3
,
1143 OPC_PREFX
= 0x0F | OPC_CP3
,
1144 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1145 OPC_MADD_S
= 0x20 | OPC_CP3
,
1146 OPC_MADD_D
= 0x21 | OPC_CP3
,
1147 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1148 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1149 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1150 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1151 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1152 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1153 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1154 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1155 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1156 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1160 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1162 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1163 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1164 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1165 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1166 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1167 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1168 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1169 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1170 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1171 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1172 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1173 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1174 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1175 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1176 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1177 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1178 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1179 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1180 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1181 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1182 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1184 /* MI10 instruction */
1185 OPC_LD_B
= (0x20) | OPC_MSA
,
1186 OPC_LD_H
= (0x21) | OPC_MSA
,
1187 OPC_LD_W
= (0x22) | OPC_MSA
,
1188 OPC_LD_D
= (0x23) | OPC_MSA
,
1189 OPC_ST_B
= (0x24) | OPC_MSA
,
1190 OPC_ST_H
= (0x25) | OPC_MSA
,
1191 OPC_ST_W
= (0x26) | OPC_MSA
,
1192 OPC_ST_D
= (0x27) | OPC_MSA
,
1196 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1197 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1198 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1199 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1200 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1201 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1202 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1203 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1204 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1205 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1206 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1207 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1208 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1210 /* I8 instruction */
1211 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1212 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1213 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1214 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1215 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1216 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1217 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1218 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1219 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1220 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1222 /* VEC/2R/2RF instruction */
1223 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1224 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1225 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1226 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1227 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1228 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1229 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1231 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1232 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1234 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1235 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1236 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1237 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1238 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1240 /* 2RF instruction df(bit 16) = _w, _d */
1241 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1242 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1243 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1244 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1245 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1246 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1247 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1248 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1249 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1250 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1251 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1252 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1253 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1254 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1255 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1256 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1258 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1259 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1260 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1261 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1262 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1263 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1264 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1265 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1266 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1267 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1268 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1269 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1270 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1272 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1274 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1275 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1278 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1279 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1281 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1282 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1283 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1285 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1290 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1292 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1293 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1294 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1295 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1296 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1297 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1298 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1299 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1300 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1301 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1310 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1312 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1313 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1314 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1315 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1316 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1317 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1318 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1319 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1320 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1321 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1323 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1324 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1325 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1326 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1327 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1328 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1329 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1330 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1331 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1332 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 /* 3RF instruction _df(bit 21) = _w, _d */
1335 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1336 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1337 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1338 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1339 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1340 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1341 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1342 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1343 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1346 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1357 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1358 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1362 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1367 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1370 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1373 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1377 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1378 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1379 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1380 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1381 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1382 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1383 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1384 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1385 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1386 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1387 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1388 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1389 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1392 /* global register indices */
1393 static TCGv cpu_gpr
[32], cpu_PC
;
1394 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1395 static TCGv cpu_dspctrl
, btarget
, bcond
;
1396 static TCGv_i32 hflags
;
1397 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1398 static TCGv_i64 fpu_f64
[32];
1399 static TCGv_i64 msa_wr_d
[64];
1401 #include "exec/gen-icount.h"
/* Helper-call wrappers: box a constant int argument into a temporary
   TCGv_i32, call the named helper with cpu_env, and free the temporary. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1445 typedef struct DisasContext
{
1446 DisasContextBase base
;
1447 target_ulong saved_pc
;
1448 target_ulong page_start
;
1451 int32_t CP0_Config1
;
1452 /* Routine used to access memory */
1454 TCGMemOp default_tcg_memop_mask
;
1455 uint32_t hflags
, saved_hflags
;
1456 target_ulong btarget
;
1467 int CP0_LLAddr_shift
;
1476 #define DISAS_STOP DISAS_TARGET_0
1477 #define DISAS_EXIT DISAS_TARGET_1
/* ABI names for the 32 general-purpose registers, indexed by register
   number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Each 128-bit MSA register is named as two 64-bit halves. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Debug logging of disassembly; compiled out unless MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic for the instruction being decoded. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
1538 /* General purpose registers moves. */
1539 static inline void gen_load_gpr (TCGv t
, int reg
)
1542 tcg_gen_movi_tl(t
, 0);
1544 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1547 static inline void gen_store_gpr (TCGv t
, int reg
)
1550 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1553 /* Moves to/from shadow registers. */
1554 static inline void gen_load_srsgpr (int from
, int to
)
1556 TCGv t0
= tcg_temp_new();
1559 tcg_gen_movi_tl(t0
, 0);
1561 TCGv_i32 t2
= tcg_temp_new_i32();
1562 TCGv_ptr addr
= tcg_temp_new_ptr();
1564 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1565 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1566 tcg_gen_andi_i32(t2
, t2
, 0xf);
1567 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1568 tcg_gen_ext_i32_ptr(addr
, t2
);
1569 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1571 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1572 tcg_temp_free_ptr(addr
);
1573 tcg_temp_free_i32(t2
);
1575 gen_store_gpr(t0
, to
);
1579 static inline void gen_store_srsgpr (int from
, int to
)
1582 TCGv t0
= tcg_temp_new();
1583 TCGv_i32 t2
= tcg_temp_new_i32();
1584 TCGv_ptr addr
= tcg_temp_new_ptr();
1586 gen_load_gpr(t0
, from
);
1587 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1588 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1589 tcg_gen_andi_i32(t2
, t2
, 0xf);
1590 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1591 tcg_gen_ext_i32_ptr(addr
, t2
);
1592 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1594 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1595 tcg_temp_free_ptr(addr
);
1596 tcg_temp_free_i32(t2
);
1602 static inline void gen_save_pc(target_ulong pc
)
1604 tcg_gen_movi_tl(cpu_PC
, pc
);
1607 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1609 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1610 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1611 gen_save_pc(ctx
->base
.pc_next
);
1612 ctx
->saved_pc
= ctx
->base
.pc_next
;
1614 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1615 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1616 ctx
->saved_hflags
= ctx
->hflags
;
1617 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1623 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1629 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1631 ctx
->saved_hflags
= ctx
->hflags
;
1632 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1638 ctx
->btarget
= env
->btarget
;
1643 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1645 TCGv_i32 texcp
= tcg_const_i32(excp
);
1646 TCGv_i32 terr
= tcg_const_i32(err
);
1647 save_cpu_state(ctx
, 1);
1648 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1649 tcg_temp_free_i32(terr
);
1650 tcg_temp_free_i32(texcp
);
1651 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1654 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1656 gen_helper_0e0i(raise_exception
, excp
);
1659 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1661 generate_exception_err(ctx
, excp
, 0);
1664 /* Floating point register moves. */
1665 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1667 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1668 generate_exception(ctx
, EXCP_RI
);
1670 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1673 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1676 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1677 generate_exception(ctx
, EXCP_RI
);
1679 t64
= tcg_temp_new_i64();
1680 tcg_gen_extu_i32_i64(t64
, t
);
1681 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1682 tcg_temp_free_i64(t64
);
1685 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1687 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1688 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1690 gen_load_fpr32(ctx
, t
, reg
| 1);
1694 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1696 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1697 TCGv_i64 t64
= tcg_temp_new_i64();
1698 tcg_gen_extu_i32_i64(t64
, t
);
1699 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1700 tcg_temp_free_i64(t64
);
1702 gen_store_fpr32(ctx
, t
, reg
| 1);
1706 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1708 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1709 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1711 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1715 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1717 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1718 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1721 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1722 t0
= tcg_temp_new_i64();
1723 tcg_gen_shri_i64(t0
, t
, 32);
1724 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1725 tcg_temp_free_i64(t0
);
/* Map an FP condition code number to its bit index in FCSR:
   cc 0 is bit 23, cc 1..7 are bits 25..31. */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1737 /* Addresses computation */
1738 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1740 tcg_gen_add_tl(ret
, arg0
, arg1
);
1742 #if defined(TARGET_MIPS64)
1743 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1744 tcg_gen_ext32s_i64(ret
, ret
);
1749 /* Addresses computation (translation time) */
1750 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1753 target_long sum
= base
+ offset
;
1755 #if defined(TARGET_MIPS64)
1756 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1763 /* Sign-extract the low 32-bits to a target_long. */
1764 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1766 #if defined(TARGET_MIPS64)
1767 tcg_gen_ext32s_i64(ret
, arg
);
1769 tcg_gen_extrl_i64_i32(ret
, arg
);
1773 /* Sign-extract the high 32-bits to a target_long. */
1774 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1776 #if defined(TARGET_MIPS64)
1777 tcg_gen_sari_i64(ret
, arg
, 32);
1779 tcg_gen_extrh_i64_i32(ret
, arg
);
1783 static inline void check_cp0_enabled(DisasContext
*ctx
)
1785 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1786 generate_exception_err(ctx
, EXCP_CpU
, 0);
1789 static inline void check_cp1_enabled(DisasContext
*ctx
)
1791 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1792 generate_exception_err(ctx
, EXCP_CpU
, 1);
1795 /* Verify that the processor is running with COP1X instructions enabled.
1796 This is associated with the nabla symbol in the MIPS32 and MIPS64
1799 static inline void check_cop1x(DisasContext
*ctx
)
1801 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1802 generate_exception_end(ctx
, EXCP_RI
);
1805 /* Verify that the processor is running with 64-bit floating-point
1806 operations enabled. */
1808 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1810 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1811 generate_exception_end(ctx
, EXCP_RI
);
1815 * Verify if floating point register is valid; an operation is not defined
1816 * if bit 0 of any register specification is set and the FR bit in the
1817 * Status register equals zero, since the register numbers specify an
1818 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1819 * in the Status register equals one, both even and odd register numbers
1820 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1822 * Multiple 64 bit wide registers can be checked by calling
1823 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1825 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1827 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1828 generate_exception_end(ctx
, EXCP_RI
);
1831 /* Verify that the processor is running with DSP instructions enabled.
1832 This is enabled by CP0 Status register MX(24) bit.
1835 static inline void check_dsp(DisasContext
*ctx
)
1837 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1838 if (ctx
->insn_flags
& ASE_DSP
) {
1839 generate_exception_end(ctx
, EXCP_DSPDIS
);
1841 generate_exception_end(ctx
, EXCP_RI
);
1846 static inline void check_dspr2(DisasContext
*ctx
)
1848 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1849 if (ctx
->insn_flags
& ASE_DSP
) {
1850 generate_exception_end(ctx
, EXCP_DSPDIS
);
1852 generate_exception_end(ctx
, EXCP_RI
);
1857 /* This code generates a "reserved instruction" exception if the
1858 CPU does not support the instruction set corresponding to flags. */
1859 static inline void check_insn(DisasContext
*ctx
, int flags
)
1861 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1862 generate_exception_end(ctx
, EXCP_RI
);
1866 /* This code generates a "reserved instruction" exception if the
1867 CPU has corresponding flag set which indicates that the instruction
1868 has been removed. */
1869 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1871 if (unlikely(ctx
->insn_flags
& flags
)) {
1872 generate_exception_end(ctx
, EXCP_RI
);
1876 /* This code generates a "reserved instruction" exception if the
1877 CPU does not support 64-bit paired-single (PS) floating point data type */
1878 static inline void check_ps(DisasContext
*ctx
)
1880 if (unlikely(!ctx
->ps
)) {
1881 generate_exception(ctx
, EXCP_RI
);
1883 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled in the current mode. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
1896 #ifndef CONFIG_USER_ONLY
1897 static inline void check_mvh(DisasContext
*ctx
)
1899 if (unlikely(!ctx
->mvh
)) {
1900 generate_exception(ctx
, EXCP_RI
);
1905 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1906 calling interface for 32 and 64-bit FPRs. No sense in changing
1907 all callers for gen_load_fpr32 when we need the CTX parameter for
1909 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1910 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1911 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1912 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1913 int ft, int fs, int cc) \
1915 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1916 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1925 check_cp1_registers(ctx, fs | ft); \
1933 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1934 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1936 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1937 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1938 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1939 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1940 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1941 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1942 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1943 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1944 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1945 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1946 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1947 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1948 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1949 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1950 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1951 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1954 tcg_temp_free_i##bits (fp0); \
1955 tcg_temp_free_i##bits (fp1); \
1958 FOP_CONDS(, 0, d
, FMT_D
, 64)
1959 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1960 FOP_CONDS(, 0, s
, FMT_S
, 32)
1961 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1962 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1963 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1966 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1967 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1968 int ft, int fs, int fd) \
1970 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1971 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1972 if (ifmt == FMT_D) { \
1973 check_cp1_registers(ctx, fs | ft | fd); \
1975 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1976 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1979 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1982 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1985 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1988 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1991 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1994 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1997 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
2000 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
2003 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
2006 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2009 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2012 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2015 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2018 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2021 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2024 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2027 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2030 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2033 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2036 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2039 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2042 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2048 tcg_temp_free_i ## bits (fp0); \
2049 tcg_temp_free_i ## bits (fp1); \
2052 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2053 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2055 #undef gen_ldcmp_fpr32
2056 #undef gen_ldcmp_fpr64
2058 /* load/store instructions. */
2059 #ifdef CONFIG_USER_ONLY
/* User-mode LL: perform the load, then record the address in lladdr
   and the loaded value in llval so a later SC can check them.
   NOTE(review): the closing brace, #else and #endif of this pair of
   definitions are missing from this extract. */
2060 #define OP_LD_ATOMIC(insn,fname) \
2061 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2062 DisasContext *ctx) \
2064 TCGv t0 = tcg_temp_new(); \
2065 tcg_gen_mov_tl(t0, arg1); \
2066 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2067 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2068 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2069 tcg_temp_free(t0); \
2072 #define OP_LD_ATOMIC(insn,fname) \
2073 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2074 DisasContext *ctx) \
2076 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2079 OP_LD_ATOMIC(ll
,ld32s
);
/* 64-bit LLD exists only on MIPS64 targets. */
2080 #if defined(TARGET_MIPS64)
2081 OP_LD_ATOMIC(lld
,ld64
);
2085 #ifdef CONFIG_USER_ONLY
/* User-mode SC: check natural alignment (AdES on failure), compare the
   address against the recorded lladdr, and raise EXCP_SC to let the
   helper complete the conditional store; rt is cleared on the mismatch
   path.  NOTE(review): the closing braces, #else and #endif between
   the two definitions are missing from this extract. */
2086 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2087 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2088 DisasContext *ctx) \
2090 TCGv t0 = tcg_temp_new(); \
2091 TCGLabel *l1 = gen_new_label(); \
2092 TCGLabel *l2 = gen_new_label(); \
2094 tcg_gen_andi_tl(t0, arg2, almask); \
2095 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2096 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2097 generate_exception(ctx, EXCP_AdES); \
2098 gen_set_label(l1); \
2099 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2100 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2101 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2102 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2103 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2104 generate_exception_end(ctx, EXCP_SC); \
2105 gen_set_label(l2); \
2106 tcg_gen_movi_tl(t0, 0); \
2107 gen_store_gpr(t0, rt); \
2108 tcg_temp_free(t0); \
2111 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2112 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2113 DisasContext *ctx) \
2115 TCGv t0 = tcg_temp_new(); \
2116 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2117 gen_store_gpr(t0, rt); \
2118 tcg_temp_free(t0); \
2121 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
/* 64-bit SCD exists only on MIPS64 targets (8-byte alignment mask). */
2122 #if defined(TARGET_MIPS64)
2123 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
/* Compute addr = GPR[base] + offset, with fast paths for base == 0
   (immediate only) and offset == 0 (register only).
   NOTE(review): the opening brace and the first condition line
   (original lines 2129-2130) are missing from this extract; the
   visible "} else if" implies a leading "if (base == 0) {" -- confirm
   against the full source. */
2127 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2128 int base
, int offset
)
2131 tcg_gen_movi_tl(addr
, offset
);
2132 } else if (offset
== 0) {
2133 gen_load_gpr(addr
, base
);
/* General case: materialise the offset, then add the base register
   with address-wrapping semantics. */
2135 tcg_gen_movi_tl(addr
, offset
);
2136 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
/* Return the word-aligned PC used as the base of PC-relative
   addressing, starting from the next instruction's address.  Inside a
   branch delay slot the branch size is accounted for.
   NOTE(review): the adjustment using branch_bytes and the return
   statement (original lines 2146-2152) are missing from this
   extract. */
2140 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2142 target_ulong pc
= ctx
->base
.pc_next
;
2144 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2145 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
/* Clear the low two bits: the base is always word aligned. */
2150 pc
&= ~(target_ulong
)3;
2155 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2156 int rt
, int base
, int offset
)
2159 int mem_idx
= ctx
->mem_idx
;
2161 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2162 /* Loongson CPU uses a load to zero register for prefetch.
2163 We emulate it as a NOP. On other CPU we must perform the
2164 actual memory access. */
2168 t0
= tcg_temp_new();
2169 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2172 #if defined(TARGET_MIPS64)
2174 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2175 ctx
->default_tcg_memop_mask
);
2176 gen_store_gpr(t0
, rt
);
2179 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2180 ctx
->default_tcg_memop_mask
);
2181 gen_store_gpr(t0
, rt
);
2185 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2186 gen_store_gpr(t0
, rt
);
2189 t1
= tcg_temp_new();
2190 /* Do a byte access to possibly trigger a page
2191 fault with the unaligned address. */
2192 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2193 tcg_gen_andi_tl(t1
, t0
, 7);
2194 #ifndef TARGET_WORDS_BIGENDIAN
2195 tcg_gen_xori_tl(t1
, t1
, 7);
2197 tcg_gen_shli_tl(t1
, t1
, 3);
2198 tcg_gen_andi_tl(t0
, t0
, ~7);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2200 tcg_gen_shl_tl(t0
, t0
, t1
);
2201 t2
= tcg_const_tl(-1);
2202 tcg_gen_shl_tl(t2
, t2
, t1
);
2203 gen_load_gpr(t1
, rt
);
2204 tcg_gen_andc_tl(t1
, t1
, t2
);
2206 tcg_gen_or_tl(t0
, t0
, t1
);
2208 gen_store_gpr(t0
, rt
);
2211 t1
= tcg_temp_new();
2212 /* Do a byte access to possibly trigger a page
2213 fault with the unaligned address. */
2214 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2215 tcg_gen_andi_tl(t1
, t0
, 7);
2216 #ifdef TARGET_WORDS_BIGENDIAN
2217 tcg_gen_xori_tl(t1
, t1
, 7);
2219 tcg_gen_shli_tl(t1
, t1
, 3);
2220 tcg_gen_andi_tl(t0
, t0
, ~7);
2221 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2222 tcg_gen_shr_tl(t0
, t0
, t1
);
2223 tcg_gen_xori_tl(t1
, t1
, 63);
2224 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2225 tcg_gen_shl_tl(t2
, t2
, t1
);
2226 gen_load_gpr(t1
, rt
);
2227 tcg_gen_and_tl(t1
, t1
, t2
);
2229 tcg_gen_or_tl(t0
, t0
, t1
);
2231 gen_store_gpr(t0
, rt
);
2234 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2235 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2237 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2238 gen_store_gpr(t0
, rt
);
2242 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2243 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2245 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2246 gen_store_gpr(t0
, rt
);
2249 mem_idx
= MIPS_HFLAG_UM
;
2252 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2253 ctx
->default_tcg_memop_mask
);
2254 gen_store_gpr(t0
, rt
);
2257 mem_idx
= MIPS_HFLAG_UM
;
2260 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2261 ctx
->default_tcg_memop_mask
);
2262 gen_store_gpr(t0
, rt
);
2265 mem_idx
= MIPS_HFLAG_UM
;
2268 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2269 ctx
->default_tcg_memop_mask
);
2270 gen_store_gpr(t0
, rt
);
2273 mem_idx
= MIPS_HFLAG_UM
;
2276 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2277 gen_store_gpr(t0
, rt
);
2280 mem_idx
= MIPS_HFLAG_UM
;
2283 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2284 gen_store_gpr(t0
, rt
);
2287 mem_idx
= MIPS_HFLAG_UM
;
2290 t1
= tcg_temp_new();
2291 /* Do a byte access to possibly trigger a page
2292 fault with the unaligned address. */
2293 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2294 tcg_gen_andi_tl(t1
, t0
, 3);
2295 #ifndef TARGET_WORDS_BIGENDIAN
2296 tcg_gen_xori_tl(t1
, t1
, 3);
2298 tcg_gen_shli_tl(t1
, t1
, 3);
2299 tcg_gen_andi_tl(t0
, t0
, ~3);
2300 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2301 tcg_gen_shl_tl(t0
, t0
, t1
);
2302 t2
= tcg_const_tl(-1);
2303 tcg_gen_shl_tl(t2
, t2
, t1
);
2304 gen_load_gpr(t1
, rt
);
2305 tcg_gen_andc_tl(t1
, t1
, t2
);
2307 tcg_gen_or_tl(t0
, t0
, t1
);
2309 tcg_gen_ext32s_tl(t0
, t0
);
2310 gen_store_gpr(t0
, rt
);
2313 mem_idx
= MIPS_HFLAG_UM
;
2316 t1
= tcg_temp_new();
2317 /* Do a byte access to possibly trigger a page
2318 fault with the unaligned address. */
2319 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2320 tcg_gen_andi_tl(t1
, t0
, 3);
2321 #ifdef TARGET_WORDS_BIGENDIAN
2322 tcg_gen_xori_tl(t1
, t1
, 3);
2324 tcg_gen_shli_tl(t1
, t1
, 3);
2325 tcg_gen_andi_tl(t0
, t0
, ~3);
2326 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2327 tcg_gen_shr_tl(t0
, t0
, t1
);
2328 tcg_gen_xori_tl(t1
, t1
, 31);
2329 t2
= tcg_const_tl(0xfffffffeull
);
2330 tcg_gen_shl_tl(t2
, t2
, t1
);
2331 gen_load_gpr(t1
, rt
);
2332 tcg_gen_and_tl(t1
, t1
, t2
);
2334 tcg_gen_or_tl(t0
, t0
, t1
);
2336 tcg_gen_ext32s_tl(t0
, t0
);
2337 gen_store_gpr(t0
, rt
);
2340 mem_idx
= MIPS_HFLAG_UM
;
2344 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2345 gen_store_gpr(t0
, rt
);
2352 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2353 int base
, int offset
)
2355 TCGv t0
= tcg_temp_new();
2356 TCGv t1
= tcg_temp_new();
2357 int mem_idx
= ctx
->mem_idx
;
2359 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2360 gen_load_gpr(t1
, rt
);
2362 #if defined(TARGET_MIPS64)
2364 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2365 ctx
->default_tcg_memop_mask
);
2368 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2371 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2375 mem_idx
= MIPS_HFLAG_UM
;
2378 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2379 ctx
->default_tcg_memop_mask
);
2382 mem_idx
= MIPS_HFLAG_UM
;
2385 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2386 ctx
->default_tcg_memop_mask
);
2389 mem_idx
= MIPS_HFLAG_UM
;
2392 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2395 mem_idx
= MIPS_HFLAG_UM
;
2398 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2401 mem_idx
= MIPS_HFLAG_UM
;
2404 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2412 /* Store conditional */
/* Emit an SC/SCD: compute the effective address, load the value to
   store from rt, and dispatch to the op_st_sc/op_st_scd helpers which
   write the success flag back to rt.  NOTE(review): the switch on opc
   and its case labels are missing from this extract. */
2413 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2414 int base
, int16_t offset
)
2417 int mem_idx
= ctx
->mem_idx
;
/* User-only needs local temps: they must survive the potential
   exception generated inside op_st_sc. */
2419 #ifdef CONFIG_USER_ONLY
2420 t0
= tcg_temp_local_new();
2421 t1
= tcg_temp_local_new();
2423 t0
= tcg_temp_new();
2424 t1
= tcg_temp_new();
2426 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2427 gen_load_gpr(t1
, rt
);
2429 #if defined(TARGET_MIPS64)
2432 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
/* Presumably the EVA variant: force user-mode memory index --
   TODO confirm the dropped case labels. */
2436 mem_idx
= MIPS_HFLAG_UM
;
2440 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2447 /* Load and store */
/* FP load/store dispatch: 32-bit and 64-bit loads/stores to/from the
   FPRs through address t0.  NOTE(review): the switch on opc and its
   case labels (presumably OPC_LWC1/OPC_SWC1/OPC_LDC1/OPC_SDC1) are
   missing from this extract -- the sections below are annotated by
   inference from the operations used. */
2448 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2451 /* Don't do NOP if destination is zero: we must perform the actual
/* 32-bit FP load (LWC1-like): load, then store to FPR ft. */
2456 TCGv_i32 fp0
= tcg_temp_new_i32();
2457 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2458 ctx
->default_tcg_memop_mask
);
2459 gen_store_fpr32(ctx
, fp0
, ft
);
2460 tcg_temp_free_i32(fp0
);
/* 32-bit FP store (SWC1-like): fetch FPR ft, then store. */
2465 TCGv_i32 fp0
= tcg_temp_new_i32();
2466 gen_load_fpr32(ctx
, fp0
, ft
);
2467 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2468 ctx
->default_tcg_memop_mask
);
2469 tcg_temp_free_i32(fp0
);
/* 64-bit FP load (LDC1-like). */
2474 TCGv_i64 fp0
= tcg_temp_new_i64();
2475 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2476 ctx
->default_tcg_memop_mask
);
2477 gen_store_fpr64(ctx
, fp0
, ft
);
2478 tcg_temp_free_i64(fp0
);
/* 64-bit FP store (SDC1-like). */
2483 TCGv_i64 fp0
= tcg_temp_new_i64();
2484 gen_load_fpr64(ctx
, fp0
, ft
);
2485 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2486 ctx
->default_tcg_memop_mask
);
2487 tcg_temp_free_i64(fp0
);
/* Unknown opcode: Reserved Instruction. */
2491 MIPS_INVAL("flt_ldst");
2492 generate_exception_end(ctx
, EXCP_RI
);
/* COP1 load/store entry point: verify the FPU exists (CP0 Config1.FP)
   and is enabled, compute rs + imm, and hand off to gen_flt_ldst();
   without an FPU a Coprocessor Unusable (CU 1) exception is raised.
   NOTE(review): the switch/case structure around the ISA_MIPS2 check
   and the closing else are missing from this extract. */
2497 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2498 int rs
, int16_t imm
)
2500 TCGv t0
= tcg_temp_new();
2502 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2503 check_cp1_enabled(ctx
);
/* 64-bit FP load/store requires at least MIPS II. */
2507 check_insn(ctx
, ISA_MIPS2
);
2510 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2511 gen_flt_ldst(ctx
, op
, rt
, t0
);
/* No FPU present: coprocessor 1 unusable. */
2514 generate_exception_err(ctx
, EXCP_CpU
, 1);
2519 /* Arithmetic with immediate operand */
2520 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2521 int rt
, int rs
, int imm
)
2523 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2525 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2526 /* If no destination, treat it as a NOP.
2527 For addi, we must generate the overflow exception when needed. */
2533 TCGv t0
= tcg_temp_local_new();
2534 TCGv t1
= tcg_temp_new();
2535 TCGv t2
= tcg_temp_new();
2536 TCGLabel
*l1
= gen_new_label();
2538 gen_load_gpr(t1
, rs
);
2539 tcg_gen_addi_tl(t0
, t1
, uimm
);
2540 tcg_gen_ext32s_tl(t0
, t0
);
2542 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2543 tcg_gen_xori_tl(t2
, t0
, uimm
);
2544 tcg_gen_and_tl(t1
, t1
, t2
);
2546 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2548 /* operands of same sign, result different sign */
2549 generate_exception(ctx
, EXCP_OVERFLOW
);
2551 tcg_gen_ext32s_tl(t0
, t0
);
2552 gen_store_gpr(t0
, rt
);
2558 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2559 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2561 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2564 #if defined(TARGET_MIPS64)
2567 TCGv t0
= tcg_temp_local_new();
2568 TCGv t1
= tcg_temp_new();
2569 TCGv t2
= tcg_temp_new();
2570 TCGLabel
*l1
= gen_new_label();
2572 gen_load_gpr(t1
, rs
);
2573 tcg_gen_addi_tl(t0
, t1
, uimm
);
2575 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2576 tcg_gen_xori_tl(t2
, t0
, uimm
);
2577 tcg_gen_and_tl(t1
, t1
, t2
);
2579 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2581 /* operands of same sign, result different sign */
2582 generate_exception(ctx
, EXCP_OVERFLOW
);
2584 gen_store_gpr(t0
, rt
);
2590 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2592 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2599 /* Logic with immediate operand */
2600 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2601 int rt
, int rs
, int16_t imm
)
2606 /* If no destination, treat it as a NOP. */
2609 uimm
= (uint16_t)imm
;
2612 if (likely(rs
!= 0))
2613 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2615 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2619 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2621 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2624 if (likely(rs
!= 0))
2625 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2627 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2630 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2632 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2633 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2635 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2644 /* Set on less than with immediate operand */
/* SLTI/SLTIU: set GPR[rt] to 1 when GPR[rs] compares below the
   sign-extended immediate (signed or unsigned compare), else 0.
   NOTE(review): the rt == 0 early return, the switch on opc and the
   temp free are missing from this extract. */
2645 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2646 int rt
, int rs
, int16_t imm
)
2648 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2652 /* If no destination, treat it as a NOP. */
2655 t0
= tcg_temp_new();
2656 gen_load_gpr(t0
, rs
);
/* Signed compare (SLTI). */
2659 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
/* Unsigned compare (SLTIU). */
2662 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2668 /* Shifts with immediate operand */
2669 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2670 int rt
, int rs
, int16_t imm
)
2672 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2676 /* If no destination, treat it as a NOP. */
2680 t0
= tcg_temp_new();
2681 gen_load_gpr(t0
, rs
);
2684 tcg_gen_shli_tl(t0
, t0
, uimm
);
2685 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2688 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2692 tcg_gen_ext32u_tl(t0
, t0
);
2693 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2695 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2700 TCGv_i32 t1
= tcg_temp_new_i32();
2702 tcg_gen_trunc_tl_i32(t1
, t0
);
2703 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2704 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2705 tcg_temp_free_i32(t1
);
2707 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2710 #if defined(TARGET_MIPS64)
2712 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2715 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2718 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2722 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2724 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2728 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2731 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2734 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2737 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2745 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2746 int rd
, int rs
, int rt
)
2748 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2749 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2750 /* If no destination, treat it as a NOP.
2751 For add & sub, we must generate the overflow exception when needed. */
2758 TCGv t0
= tcg_temp_local_new();
2759 TCGv t1
= tcg_temp_new();
2760 TCGv t2
= tcg_temp_new();
2761 TCGLabel
*l1
= gen_new_label();
2763 gen_load_gpr(t1
, rs
);
2764 gen_load_gpr(t2
, rt
);
2765 tcg_gen_add_tl(t0
, t1
, t2
);
2766 tcg_gen_ext32s_tl(t0
, t0
);
2767 tcg_gen_xor_tl(t1
, t1
, t2
);
2768 tcg_gen_xor_tl(t2
, t0
, t2
);
2769 tcg_gen_andc_tl(t1
, t2
, t1
);
2771 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2773 /* operands of same sign, result different sign */
2774 generate_exception(ctx
, EXCP_OVERFLOW
);
2776 gen_store_gpr(t0
, rd
);
2781 if (rs
!= 0 && rt
!= 0) {
2782 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2783 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2784 } else if (rs
== 0 && rt
!= 0) {
2785 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2786 } else if (rs
!= 0 && rt
== 0) {
2787 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2794 TCGv t0
= tcg_temp_local_new();
2795 TCGv t1
= tcg_temp_new();
2796 TCGv t2
= tcg_temp_new();
2797 TCGLabel
*l1
= gen_new_label();
2799 gen_load_gpr(t1
, rs
);
2800 gen_load_gpr(t2
, rt
);
2801 tcg_gen_sub_tl(t0
, t1
, t2
);
2802 tcg_gen_ext32s_tl(t0
, t0
);
2803 tcg_gen_xor_tl(t2
, t1
, t2
);
2804 tcg_gen_xor_tl(t1
, t0
, t1
);
2805 tcg_gen_and_tl(t1
, t1
, t2
);
2807 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2809 /* operands of different sign, first operand and result different sign */
2810 generate_exception(ctx
, EXCP_OVERFLOW
);
2812 gen_store_gpr(t0
, rd
);
2817 if (rs
!= 0 && rt
!= 0) {
2818 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2819 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2820 } else if (rs
== 0 && rt
!= 0) {
2821 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2822 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2823 } else if (rs
!= 0 && rt
== 0) {
2824 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2826 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2829 #if defined(TARGET_MIPS64)
2832 TCGv t0
= tcg_temp_local_new();
2833 TCGv t1
= tcg_temp_new();
2834 TCGv t2
= tcg_temp_new();
2835 TCGLabel
*l1
= gen_new_label();
2837 gen_load_gpr(t1
, rs
);
2838 gen_load_gpr(t2
, rt
);
2839 tcg_gen_add_tl(t0
, t1
, t2
);
2840 tcg_gen_xor_tl(t1
, t1
, t2
);
2841 tcg_gen_xor_tl(t2
, t0
, t2
);
2842 tcg_gen_andc_tl(t1
, t2
, t1
);
2844 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2846 /* operands of same sign, result different sign */
2847 generate_exception(ctx
, EXCP_OVERFLOW
);
2849 gen_store_gpr(t0
, rd
);
2854 if (rs
!= 0 && rt
!= 0) {
2855 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2856 } else if (rs
== 0 && rt
!= 0) {
2857 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2858 } else if (rs
!= 0 && rt
== 0) {
2859 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2861 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2866 TCGv t0
= tcg_temp_local_new();
2867 TCGv t1
= tcg_temp_new();
2868 TCGv t2
= tcg_temp_new();
2869 TCGLabel
*l1
= gen_new_label();
2871 gen_load_gpr(t1
, rs
);
2872 gen_load_gpr(t2
, rt
);
2873 tcg_gen_sub_tl(t0
, t1
, t2
);
2874 tcg_gen_xor_tl(t2
, t1
, t2
);
2875 tcg_gen_xor_tl(t1
, t0
, t1
);
2876 tcg_gen_and_tl(t1
, t1
, t2
);
2878 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2880 /* operands of different sign, first operand and result different sign */
2881 generate_exception(ctx
, EXCP_OVERFLOW
);
2883 gen_store_gpr(t0
, rd
);
2888 if (rs
!= 0 && rt
!= 0) {
2889 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2890 } else if (rs
== 0 && rt
!= 0) {
2891 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2892 } else if (rs
!= 0 && rt
== 0) {
2893 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2895 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2900 if (likely(rs
!= 0 && rt
!= 0)) {
2901 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2902 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2904 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2910 /* Conditional move */
/* MOVN/MOVZ and the R6 SELNEZ/SELEQZ family: conditionally move
   GPR[rs] into GPR[rd] depending on whether GPR[rt] is (non)zero.
   NOTE(review): the switch on opc and its case labels are missing
   from this extract; the four movcond forms below are annotated by
   inference from their condition and default operands. */
2911 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2912 int rd
, int rs
, int rt
)
2917 /* If no destination, treat it as a NOP. */
2921 t0
= tcg_temp_new();
2922 gen_load_gpr(t0
, rt
);
2923 t1
= tcg_const_tl(0);
2924 t2
= tcg_temp_new();
2925 gen_load_gpr(t2
, rs
);
/* rd = (rt != 0) ? rs : rd  -- MOVN-like. */
2928 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
/* rd = (rt == 0) ? rs : rd  -- MOVZ-like. */
2931 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
/* rd = (rt != 0) ? rs : 0  -- SELNEZ-like. */
2934 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
/* rd = (rt == 0) ? rs : 0  -- SELEQZ-like. */
2937 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2946 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2947 int rd
, int rs
, int rt
)
2950 /* If no destination, treat it as a NOP. */
2956 if (likely(rs
!= 0 && rt
!= 0)) {
2957 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2959 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2963 if (rs
!= 0 && rt
!= 0) {
2964 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2965 } else if (rs
== 0 && rt
!= 0) {
2966 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2967 } else if (rs
!= 0 && rt
== 0) {
2968 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2970 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2974 if (likely(rs
!= 0 && rt
!= 0)) {
2975 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2976 } else if (rs
== 0 && rt
!= 0) {
2977 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2978 } else if (rs
!= 0 && rt
== 0) {
2979 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2981 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2985 if (likely(rs
!= 0 && rt
!= 0)) {
2986 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2987 } else if (rs
== 0 && rt
!= 0) {
2988 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2989 } else if (rs
!= 0 && rt
== 0) {
2990 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2992 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2998 /* Set on lower than */
/* SLT/SLTU: set GPR[rd] to 1 when GPR[rs] < GPR[rt] (signed or
   unsigned), else 0.  NOTE(review): the rd == 0 early return, the
   switch on opc and the temp frees are missing from this extract. */
2999 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
3000 int rd
, int rs
, int rt
)
3005 /* If no destination, treat it as a NOP. */
3009 t0
= tcg_temp_new();
3010 t1
= tcg_temp_new();
3011 gen_load_gpr(t0
, rs
);
3012 gen_load_gpr(t1
, rt
);
/* Signed compare (SLT). */
3015 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
/* Unsigned compare (SLTU). */
3018 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3026 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3027 int rd
, int rs
, int rt
)
3032 /* If no destination, treat it as a NOP.
3033 For add & sub, we must generate the overflow exception when needed. */
3037 t0
= tcg_temp_new();
3038 t1
= tcg_temp_new();
3039 gen_load_gpr(t0
, rs
);
3040 gen_load_gpr(t1
, rt
);
3043 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3044 tcg_gen_shl_tl(t0
, t1
, t0
);
3045 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3048 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3049 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3052 tcg_gen_ext32u_tl(t1
, t1
);
3053 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3054 tcg_gen_shr_tl(t0
, t1
, t0
);
3055 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3059 TCGv_i32 t2
= tcg_temp_new_i32();
3060 TCGv_i32 t3
= tcg_temp_new_i32();
3062 tcg_gen_trunc_tl_i32(t2
, t0
);
3063 tcg_gen_trunc_tl_i32(t3
, t1
);
3064 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3065 tcg_gen_rotr_i32(t2
, t3
, t2
);
3066 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3067 tcg_temp_free_i32(t2
);
3068 tcg_temp_free_i32(t3
);
3071 #if defined(TARGET_MIPS64)
3073 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3074 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3077 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3078 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3081 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3082 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3085 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3086 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3094 /* Arithmetic on HI/LO registers */
/* MFHI/MFLO/MTHI/MTLO on accumulator 'acc'.  On MIPS64 targets the
   value is sign-extended to 32 bits when moved (presumably only for
   acc != 0 / non-R6 paths -- the #else/#endif and case labels of each
   section are missing from this extract). */
3095 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3097 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
/* MFHI: GPR[reg] <- HI[acc]. */
3108 #if defined(TARGET_MIPS64)
3110 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3114 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
/* MFLO: GPR[reg] <- LO[acc]. */
3118 #if defined(TARGET_MIPS64)
3120 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3124 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
/* MTHI: HI[acc] <- GPR[reg] (zero when reg is $0). */
3129 #if defined(TARGET_MIPS64)
3131 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3135 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3138 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
/* MTLO: LO[acc] <- GPR[reg] (zero when reg is $0). */
3143 #if defined(TARGET_MIPS64)
3145 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3149 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3152 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3158 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3161 TCGv t0
= tcg_const_tl(addr
);
3162 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3163 gen_store_gpr(t0
, reg
);
3167 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3173 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3176 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3177 addr
= addr_add(ctx
, pc
, offset
);
3178 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3182 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3183 addr
= addr_add(ctx
, pc
, offset
);
3184 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3186 #if defined(TARGET_MIPS64)
3189 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3190 addr
= addr_add(ctx
, pc
, offset
);
3191 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3195 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3198 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3199 addr
= addr_add(ctx
, pc
, offset
);
3200 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3205 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3206 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3207 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3210 #if defined(TARGET_MIPS64)
3211 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3212 case R6_OPC_LDPC
+ (1 << 16):
3213 case R6_OPC_LDPC
+ (2 << 16):
3214 case R6_OPC_LDPC
+ (3 << 16):
3216 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3217 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3218 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3222 MIPS_INVAL("OPC_PCREL");
3223 generate_exception_end(ctx
, EXCP_RI
);
3230 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3239 t0
= tcg_temp_new();
3240 t1
= tcg_temp_new();
3242 gen_load_gpr(t0
, rs
);
3243 gen_load_gpr(t1
, rt
);
3248 TCGv t2
= tcg_temp_new();
3249 TCGv t3
= tcg_temp_new();
3250 tcg_gen_ext32s_tl(t0
, t0
);
3251 tcg_gen_ext32s_tl(t1
, t1
);
3252 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3253 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3254 tcg_gen_and_tl(t2
, t2
, t3
);
3255 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3256 tcg_gen_or_tl(t2
, t2
, t3
);
3257 tcg_gen_movi_tl(t3
, 0);
3258 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3259 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3260 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3267 TCGv t2
= tcg_temp_new();
3268 TCGv t3
= tcg_temp_new();
3269 tcg_gen_ext32s_tl(t0
, t0
);
3270 tcg_gen_ext32s_tl(t1
, t1
);
3271 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3272 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3273 tcg_gen_and_tl(t2
, t2
, t3
);
3274 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3275 tcg_gen_or_tl(t2
, t2
, t3
);
3276 tcg_gen_movi_tl(t3
, 0);
3277 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3278 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3279 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3286 TCGv t2
= tcg_const_tl(0);
3287 TCGv t3
= tcg_const_tl(1);
3288 tcg_gen_ext32u_tl(t0
, t0
);
3289 tcg_gen_ext32u_tl(t1
, t1
);
3290 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3291 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3292 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3299 TCGv t2
= tcg_const_tl(0);
3300 TCGv t3
= tcg_const_tl(1);
3301 tcg_gen_ext32u_tl(t0
, t0
);
3302 tcg_gen_ext32u_tl(t1
, t1
);
3303 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3304 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3305 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3312 TCGv_i32 t2
= tcg_temp_new_i32();
3313 TCGv_i32 t3
= tcg_temp_new_i32();
3314 tcg_gen_trunc_tl_i32(t2
, t0
);
3315 tcg_gen_trunc_tl_i32(t3
, t1
);
3316 tcg_gen_mul_i32(t2
, t2
, t3
);
3317 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3318 tcg_temp_free_i32(t2
);
3319 tcg_temp_free_i32(t3
);
3324 TCGv_i32 t2
= tcg_temp_new_i32();
3325 TCGv_i32 t3
= tcg_temp_new_i32();
3326 tcg_gen_trunc_tl_i32(t2
, t0
);
3327 tcg_gen_trunc_tl_i32(t3
, t1
);
3328 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3329 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3330 tcg_temp_free_i32(t2
);
3331 tcg_temp_free_i32(t3
);
3336 TCGv_i32 t2
= tcg_temp_new_i32();
3337 TCGv_i32 t3
= tcg_temp_new_i32();
3338 tcg_gen_trunc_tl_i32(t2
, t0
);
3339 tcg_gen_trunc_tl_i32(t3
, t1
);
3340 tcg_gen_mul_i32(t2
, t2
, t3
);
3341 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3342 tcg_temp_free_i32(t2
);
3343 tcg_temp_free_i32(t3
);
3348 TCGv_i32 t2
= tcg_temp_new_i32();
3349 TCGv_i32 t3
= tcg_temp_new_i32();
3350 tcg_gen_trunc_tl_i32(t2
, t0
);
3351 tcg_gen_trunc_tl_i32(t3
, t1
);
3352 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3353 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3354 tcg_temp_free_i32(t2
);
3355 tcg_temp_free_i32(t3
);
3358 #if defined(TARGET_MIPS64)
3361 TCGv t2
= tcg_temp_new();
3362 TCGv t3
= tcg_temp_new();
3363 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3364 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3365 tcg_gen_and_tl(t2
, t2
, t3
);
3366 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3367 tcg_gen_or_tl(t2
, t2
, t3
);
3368 tcg_gen_movi_tl(t3
, 0);
3369 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3370 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3377 TCGv t2
= tcg_temp_new();
3378 TCGv t3
= tcg_temp_new();
3379 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3380 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3381 tcg_gen_and_tl(t2
, t2
, t3
);
3382 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3383 tcg_gen_or_tl(t2
, t2
, t3
);
3384 tcg_gen_movi_tl(t3
, 0);
3385 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3386 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3393 TCGv t2
= tcg_const_tl(0);
3394 TCGv t3
= tcg_const_tl(1);
3395 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3396 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3403 TCGv t2
= tcg_const_tl(0);
3404 TCGv t3
= tcg_const_tl(1);
3405 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3406 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3412 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3416 TCGv t2
= tcg_temp_new();
3417 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3422 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3426 TCGv t2
= tcg_temp_new();
3427 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3433 MIPS_INVAL("r6 mul/div");
3434 generate_exception_end(ctx
, EXCP_RI
);
3442 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3443 int acc
, int rs
, int rt
)
3447 t0
= tcg_temp_new();
3448 t1
= tcg_temp_new();
3450 gen_load_gpr(t0
, rs
);
3451 gen_load_gpr(t1
, rt
);
3460 TCGv t2
= tcg_temp_new();
3461 TCGv t3
= tcg_temp_new();
3462 tcg_gen_ext32s_tl(t0
, t0
);
3463 tcg_gen_ext32s_tl(t1
, t1
);
3464 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3465 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3466 tcg_gen_and_tl(t2
, t2
, t3
);
3467 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3468 tcg_gen_or_tl(t2
, t2
, t3
);
3469 tcg_gen_movi_tl(t3
, 0);
3470 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3471 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3472 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3473 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3474 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3481 TCGv t2
= tcg_const_tl(0);
3482 TCGv t3
= tcg_const_tl(1);
3483 tcg_gen_ext32u_tl(t0
, t0
);
3484 tcg_gen_ext32u_tl(t1
, t1
);
3485 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3486 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3487 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3488 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3489 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3496 TCGv_i32 t2
= tcg_temp_new_i32();
3497 TCGv_i32 t3
= tcg_temp_new_i32();
3498 tcg_gen_trunc_tl_i32(t2
, t0
);
3499 tcg_gen_trunc_tl_i32(t3
, t1
);
3500 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3501 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3502 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3503 tcg_temp_free_i32(t2
);
3504 tcg_temp_free_i32(t3
);
3509 TCGv_i32 t2
= tcg_temp_new_i32();
3510 TCGv_i32 t3
= tcg_temp_new_i32();
3511 tcg_gen_trunc_tl_i32(t2
, t0
);
3512 tcg_gen_trunc_tl_i32(t3
, t1
);
3513 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3514 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3515 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3516 tcg_temp_free_i32(t2
);
3517 tcg_temp_free_i32(t3
);
3520 #if defined(TARGET_MIPS64)
3523 TCGv t2
= tcg_temp_new();
3524 TCGv t3
= tcg_temp_new();
3525 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3526 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3527 tcg_gen_and_tl(t2
, t2
, t3
);
3528 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3529 tcg_gen_or_tl(t2
, t2
, t3
);
3530 tcg_gen_movi_tl(t3
, 0);
3531 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3532 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3533 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3540 TCGv t2
= tcg_const_tl(0);
3541 TCGv t3
= tcg_const_tl(1);
3542 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3543 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3544 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3550 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3553 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3558 TCGv_i64 t2
= tcg_temp_new_i64();
3559 TCGv_i64 t3
= tcg_temp_new_i64();
3561 tcg_gen_ext_tl_i64(t2
, t0
);
3562 tcg_gen_ext_tl_i64(t3
, t1
);
3563 tcg_gen_mul_i64(t2
, t2
, t3
);
3564 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3565 tcg_gen_add_i64(t2
, t2
, t3
);
3566 tcg_temp_free_i64(t3
);
3567 gen_move_low32(cpu_LO
[acc
], t2
);
3568 gen_move_high32(cpu_HI
[acc
], t2
);
3569 tcg_temp_free_i64(t2
);
3574 TCGv_i64 t2
= tcg_temp_new_i64();
3575 TCGv_i64 t3
= tcg_temp_new_i64();
3577 tcg_gen_ext32u_tl(t0
, t0
);
3578 tcg_gen_ext32u_tl(t1
, t1
);
3579 tcg_gen_extu_tl_i64(t2
, t0
);
3580 tcg_gen_extu_tl_i64(t3
, t1
);
3581 tcg_gen_mul_i64(t2
, t2
, t3
);
3582 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3583 tcg_gen_add_i64(t2
, t2
, t3
);
3584 tcg_temp_free_i64(t3
);
3585 gen_move_low32(cpu_LO
[acc
], t2
);
3586 gen_move_high32(cpu_HI
[acc
], t2
);
3587 tcg_temp_free_i64(t2
);
3592 TCGv_i64 t2
= tcg_temp_new_i64();
3593 TCGv_i64 t3
= tcg_temp_new_i64();
3595 tcg_gen_ext_tl_i64(t2
, t0
);
3596 tcg_gen_ext_tl_i64(t3
, t1
);
3597 tcg_gen_mul_i64(t2
, t2
, t3
);
3598 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3599 tcg_gen_sub_i64(t2
, t3
, t2
);
3600 tcg_temp_free_i64(t3
);
3601 gen_move_low32(cpu_LO
[acc
], t2
);
3602 gen_move_high32(cpu_HI
[acc
], t2
);
3603 tcg_temp_free_i64(t2
);
3608 TCGv_i64 t2
= tcg_temp_new_i64();
3609 TCGv_i64 t3
= tcg_temp_new_i64();
3611 tcg_gen_ext32u_tl(t0
, t0
);
3612 tcg_gen_ext32u_tl(t1
, t1
);
3613 tcg_gen_extu_tl_i64(t2
, t0
);
3614 tcg_gen_extu_tl_i64(t3
, t1
);
3615 tcg_gen_mul_i64(t2
, t2
, t3
);
3616 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3617 tcg_gen_sub_i64(t2
, t3
, t2
);
3618 tcg_temp_free_i64(t3
);
3619 gen_move_low32(cpu_LO
[acc
], t2
);
3620 gen_move_high32(cpu_HI
[acc
], t2
);
3621 tcg_temp_free_i64(t2
);
3625 MIPS_INVAL("mul/div");
3626 generate_exception_end(ctx
, EXCP_RI
);
3634 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3635 int rd
, int rs
, int rt
)
3637 TCGv t0
= tcg_temp_new();
3638 TCGv t1
= tcg_temp_new();
3640 gen_load_gpr(t0
, rs
);
3641 gen_load_gpr(t1
, rt
);
3644 case OPC_VR54XX_MULS
:
3645 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3647 case OPC_VR54XX_MULSU
:
3648 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3650 case OPC_VR54XX_MACC
:
3651 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3653 case OPC_VR54XX_MACCU
:
3654 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3656 case OPC_VR54XX_MSAC
:
3657 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3659 case OPC_VR54XX_MSACU
:
3660 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3662 case OPC_VR54XX_MULHI
:
3663 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3665 case OPC_VR54XX_MULHIU
:
3666 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3668 case OPC_VR54XX_MULSHI
:
3669 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3671 case OPC_VR54XX_MULSHIU
:
3672 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3674 case OPC_VR54XX_MACCHI
:
3675 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3677 case OPC_VR54XX_MACCHIU
:
3678 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3680 case OPC_VR54XX_MSACHI
:
3681 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3683 case OPC_VR54XX_MSACHIU
:
3684 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3687 MIPS_INVAL("mul vr54xx");
3688 generate_exception_end(ctx
, EXCP_RI
);
3691 gen_store_gpr(t0
, rd
);
3698 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3708 gen_load_gpr(t0
, rs
);
3713 #if defined(TARGET_MIPS64)
3717 tcg_gen_not_tl(t0
, t0
);
3726 tcg_gen_ext32u_tl(t0
, t0
);
3727 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3728 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3730 #if defined(TARGET_MIPS64)
3735 tcg_gen_clzi_i64(t0
, t0
, 64);
3741 /* Godson integer instructions */
3742 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3743 int rd
, int rs
, int rt
)
3755 case OPC_MULTU_G_2E
:
3756 case OPC_MULTU_G_2F
:
3757 #if defined(TARGET_MIPS64)
3758 case OPC_DMULT_G_2E
:
3759 case OPC_DMULT_G_2F
:
3760 case OPC_DMULTU_G_2E
:
3761 case OPC_DMULTU_G_2F
:
3763 t0
= tcg_temp_new();
3764 t1
= tcg_temp_new();
3767 t0
= tcg_temp_local_new();
3768 t1
= tcg_temp_local_new();
3772 gen_load_gpr(t0
, rs
);
3773 gen_load_gpr(t1
, rt
);
3778 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3779 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3781 case OPC_MULTU_G_2E
:
3782 case OPC_MULTU_G_2F
:
3783 tcg_gen_ext32u_tl(t0
, t0
);
3784 tcg_gen_ext32u_tl(t1
, t1
);
3785 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3786 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3791 TCGLabel
*l1
= gen_new_label();
3792 TCGLabel
*l2
= gen_new_label();
3793 TCGLabel
*l3
= gen_new_label();
3794 tcg_gen_ext32s_tl(t0
, t0
);
3795 tcg_gen_ext32s_tl(t1
, t1
);
3796 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3797 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3800 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3801 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3802 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3805 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3806 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3813 TCGLabel
*l1
= gen_new_label();
3814 TCGLabel
*l2
= gen_new_label();
3815 tcg_gen_ext32u_tl(t0
, t0
);
3816 tcg_gen_ext32u_tl(t1
, t1
);
3817 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3821 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3822 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3829 TCGLabel
*l1
= gen_new_label();
3830 TCGLabel
*l2
= gen_new_label();
3831 TCGLabel
*l3
= gen_new_label();
3832 tcg_gen_ext32u_tl(t0
, t0
);
3833 tcg_gen_ext32u_tl(t1
, t1
);
3834 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3835 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3836 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3838 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3841 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3842 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3849 TCGLabel
*l1
= gen_new_label();
3850 TCGLabel
*l2
= gen_new_label();
3851 tcg_gen_ext32u_tl(t0
, t0
);
3852 tcg_gen_ext32u_tl(t1
, t1
);
3853 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3854 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3857 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3858 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3862 #if defined(TARGET_MIPS64)
3863 case OPC_DMULT_G_2E
:
3864 case OPC_DMULT_G_2F
:
3865 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3867 case OPC_DMULTU_G_2E
:
3868 case OPC_DMULTU_G_2F
:
3869 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3874 TCGLabel
*l1
= gen_new_label();
3875 TCGLabel
*l2
= gen_new_label();
3876 TCGLabel
*l3
= gen_new_label();
3877 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3878 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3881 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3882 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3883 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3886 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3890 case OPC_DDIVU_G_2E
:
3891 case OPC_DDIVU_G_2F
:
3893 TCGLabel
*l1
= gen_new_label();
3894 TCGLabel
*l2
= gen_new_label();
3895 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3896 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3899 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3906 TCGLabel
*l1
= gen_new_label();
3907 TCGLabel
*l2
= gen_new_label();
3908 TCGLabel
*l3
= gen_new_label();
3909 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3910 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3911 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3913 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3916 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3920 case OPC_DMODU_G_2E
:
3921 case OPC_DMODU_G_2F
:
3923 TCGLabel
*l1
= gen_new_label();
3924 TCGLabel
*l2
= gen_new_label();
3925 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3926 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3929 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3940 /* Loongson multimedia instructions */
3941 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3943 uint32_t opc
, shift_max
;
3946 opc
= MASK_LMI(ctx
->opcode
);
3952 t0
= tcg_temp_local_new_i64();
3953 t1
= tcg_temp_local_new_i64();
3956 t0
= tcg_temp_new_i64();
3957 t1
= tcg_temp_new_i64();
3961 check_cp1_enabled(ctx
);
3962 gen_load_fpr64(ctx
, t0
, rs
);
3963 gen_load_fpr64(ctx
, t1
, rt
);
3965 #define LMI_HELPER(UP, LO) \
3966 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3967 #define LMI_HELPER_1(UP, LO) \
3968 case OPC_##UP: gen_helper_##LO(t0, t0); break
3969 #define LMI_DIRECT(UP, LO, OP) \
3970 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3973 LMI_HELPER(PADDSH
, paddsh
);
3974 LMI_HELPER(PADDUSH
, paddush
);
3975 LMI_HELPER(PADDH
, paddh
);
3976 LMI_HELPER(PADDW
, paddw
);
3977 LMI_HELPER(PADDSB
, paddsb
);
3978 LMI_HELPER(PADDUSB
, paddusb
);
3979 LMI_HELPER(PADDB
, paddb
);
3981 LMI_HELPER(PSUBSH
, psubsh
);
3982 LMI_HELPER(PSUBUSH
, psubush
);
3983 LMI_HELPER(PSUBH
, psubh
);
3984 LMI_HELPER(PSUBW
, psubw
);
3985 LMI_HELPER(PSUBSB
, psubsb
);
3986 LMI_HELPER(PSUBUSB
, psubusb
);
3987 LMI_HELPER(PSUBB
, psubb
);
3989 LMI_HELPER(PSHUFH
, pshufh
);
3990 LMI_HELPER(PACKSSWH
, packsswh
);
3991 LMI_HELPER(PACKSSHB
, packsshb
);
3992 LMI_HELPER(PACKUSHB
, packushb
);
3994 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3995 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3996 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3997 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3998 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3999 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
4001 LMI_HELPER(PAVGH
, pavgh
);
4002 LMI_HELPER(PAVGB
, pavgb
);
4003 LMI_HELPER(PMAXSH
, pmaxsh
);
4004 LMI_HELPER(PMINSH
, pminsh
);
4005 LMI_HELPER(PMAXUB
, pmaxub
);
4006 LMI_HELPER(PMINUB
, pminub
);
4008 LMI_HELPER(PCMPEQW
, pcmpeqw
);
4009 LMI_HELPER(PCMPGTW
, pcmpgtw
);
4010 LMI_HELPER(PCMPEQH
, pcmpeqh
);
4011 LMI_HELPER(PCMPGTH
, pcmpgth
);
4012 LMI_HELPER(PCMPEQB
, pcmpeqb
);
4013 LMI_HELPER(PCMPGTB
, pcmpgtb
);
4015 LMI_HELPER(PSLLW
, psllw
);
4016 LMI_HELPER(PSLLH
, psllh
);
4017 LMI_HELPER(PSRLW
, psrlw
);
4018 LMI_HELPER(PSRLH
, psrlh
);
4019 LMI_HELPER(PSRAW
, psraw
);
4020 LMI_HELPER(PSRAH
, psrah
);
4022 LMI_HELPER(PMULLH
, pmullh
);
4023 LMI_HELPER(PMULHH
, pmulhh
);
4024 LMI_HELPER(PMULHUH
, pmulhuh
);
4025 LMI_HELPER(PMADDHW
, pmaddhw
);
4027 LMI_HELPER(PASUBUB
, pasubub
);
4028 LMI_HELPER_1(BIADD
, biadd
);
4029 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
4031 LMI_DIRECT(PADDD
, paddd
, add
);
4032 LMI_DIRECT(PSUBD
, psubd
, sub
);
4033 LMI_DIRECT(XOR_CP2
, xor, xor);
4034 LMI_DIRECT(NOR_CP2
, nor
, nor
);
4035 LMI_DIRECT(AND_CP2
, and, and);
4036 LMI_DIRECT(OR_CP2
, or, or);
4039 tcg_gen_andc_i64(t0
, t1
, t0
);
4043 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4046 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4049 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4052 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4056 tcg_gen_andi_i64(t1
, t1
, 3);
4057 tcg_gen_shli_i64(t1
, t1
, 4);
4058 tcg_gen_shr_i64(t0
, t0
, t1
);
4059 tcg_gen_ext16u_i64(t0
, t0
);
4063 tcg_gen_add_i64(t0
, t0
, t1
);
4064 tcg_gen_ext32s_i64(t0
, t0
);
4067 tcg_gen_sub_i64(t0
, t0
, t1
);
4068 tcg_gen_ext32s_i64(t0
, t0
);
4090 /* Make sure shift count isn't TCG undefined behaviour. */
4091 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4096 tcg_gen_shl_i64(t0
, t0
, t1
);
4100 /* Since SRA is UndefinedResult without sign-extended inputs,
4101 we can treat SRA and DSRA the same. */
4102 tcg_gen_sar_i64(t0
, t0
, t1
);
4105 /* We want to shift in zeros for SRL; zero-extend first. */
4106 tcg_gen_ext32u_i64(t0
, t0
);
4109 tcg_gen_shr_i64(t0
, t0
, t1
);
4113 if (shift_max
== 32) {
4114 tcg_gen_ext32s_i64(t0
, t0
);
4117 /* Shifts larger than MAX produce zero. */
4118 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4119 tcg_gen_neg_i64(t1
, t1
);
4120 tcg_gen_and_i64(t0
, t0
, t1
);
4126 TCGv_i64 t2
= tcg_temp_new_i64();
4127 TCGLabel
*lab
= gen_new_label();
4129 tcg_gen_mov_i64(t2
, t0
);
4130 tcg_gen_add_i64(t0
, t1
, t2
);
4131 if (opc
== OPC_ADD_CP2
) {
4132 tcg_gen_ext32s_i64(t0
, t0
);
4134 tcg_gen_xor_i64(t1
, t1
, t2
);
4135 tcg_gen_xor_i64(t2
, t2
, t0
);
4136 tcg_gen_andc_i64(t1
, t2
, t1
);
4137 tcg_temp_free_i64(t2
);
4138 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4139 generate_exception(ctx
, EXCP_OVERFLOW
);
4147 TCGv_i64 t2
= tcg_temp_new_i64();
4148 TCGLabel
*lab
= gen_new_label();
4150 tcg_gen_mov_i64(t2
, t0
);
4151 tcg_gen_sub_i64(t0
, t1
, t2
);
4152 if (opc
== OPC_SUB_CP2
) {
4153 tcg_gen_ext32s_i64(t0
, t0
);
4155 tcg_gen_xor_i64(t1
, t1
, t2
);
4156 tcg_gen_xor_i64(t2
, t2
, t0
);
4157 tcg_gen_and_i64(t1
, t1
, t2
);
4158 tcg_temp_free_i64(t2
);
4159 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4160 generate_exception(ctx
, EXCP_OVERFLOW
);
4166 tcg_gen_ext32u_i64(t0
, t0
);
4167 tcg_gen_ext32u_i64(t1
, t1
);
4168 tcg_gen_mul_i64(t0
, t0
, t1
);
4177 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4178 FD field is the CC field? */
4180 MIPS_INVAL("loongson_cp2");
4181 generate_exception_end(ctx
, EXCP_RI
);
4188 gen_store_fpr64(ctx
, t0
, rd
);
4190 tcg_temp_free_i64(t0
);
4191 tcg_temp_free_i64(t1
);
4195 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4196 int rs
, int rt
, int16_t imm
)
4199 TCGv t0
= tcg_temp_new();
4200 TCGv t1
= tcg_temp_new();
4203 /* Load needed operands */
4211 /* Compare two registers */
4213 gen_load_gpr(t0
, rs
);
4214 gen_load_gpr(t1
, rt
);
4224 /* Compare register to immediate */
4225 if (rs
!= 0 || imm
!= 0) {
4226 gen_load_gpr(t0
, rs
);
4227 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4234 case OPC_TEQ
: /* rs == rs */
4235 case OPC_TEQI
: /* r0 == 0 */
4236 case OPC_TGE
: /* rs >= rs */
4237 case OPC_TGEI
: /* r0 >= 0 */
4238 case OPC_TGEU
: /* rs >= rs unsigned */
4239 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4241 generate_exception_end(ctx
, EXCP_TRAP
);
4243 case OPC_TLT
: /* rs < rs */
4244 case OPC_TLTI
: /* r0 < 0 */
4245 case OPC_TLTU
: /* rs < rs unsigned */
4246 case OPC_TLTIU
: /* r0 < 0 unsigned */
4247 case OPC_TNE
: /* rs != rs */
4248 case OPC_TNEI
: /* r0 != 0 */
4249 /* Never trap: treat as NOP. */
4253 TCGLabel
*l1
= gen_new_label();
4258 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4262 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4266 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4270 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4274 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4278 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4281 generate_exception(ctx
, EXCP_TRAP
);
4288 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
4290 if (unlikely(ctx
->base
.singlestep_enabled
)) {
4294 #ifndef CONFIG_USER_ONLY
4295 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
4301 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4303 if (use_goto_tb(ctx
, dest
)) {
4306 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4309 if (ctx
->base
.singlestep_enabled
) {
4310 save_cpu_state(ctx
, 0);
4311 gen_helper_raise_exception_debug(cpu_env
);
4313 tcg_gen_lookup_and_goto_ptr();
4317 /* Branches (before delay slot) */
4318 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4320 int rs
, int rt
, int32_t offset
,
4323 target_ulong btgt
= -1;
4325 int bcond_compute
= 0;
4326 TCGv t0
= tcg_temp_new();
4327 TCGv t1
= tcg_temp_new();
4329 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4330 #ifdef MIPS_DEBUG_DISAS
4331 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4332 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4334 generate_exception_end(ctx
, EXCP_RI
);
4338 /* Load needed operands */
4344 /* Compare two registers */
4346 gen_load_gpr(t0
, rs
);
4347 gen_load_gpr(t1
, rt
);
4350 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4364 /* Compare to zero */
4366 gen_load_gpr(t0
, rs
);
4369 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4372 #if defined(TARGET_MIPS64)
4374 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4376 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4379 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4384 /* Jump to immediate */
4385 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4390 /* Jump to register */
4391 if (offset
!= 0 && offset
!= 16) {
4392 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4393 others are reserved. */
4394 MIPS_INVAL("jump hint");
4395 generate_exception_end(ctx
, EXCP_RI
);
4398 gen_load_gpr(btarget
, rs
);
4401 MIPS_INVAL("branch/jump");
4402 generate_exception_end(ctx
, EXCP_RI
);
4405 if (bcond_compute
== 0) {
4406 /* No condition to be computed */
4408 case OPC_BEQ
: /* rx == rx */
4409 case OPC_BEQL
: /* rx == rx likely */
4410 case OPC_BGEZ
: /* 0 >= 0 */
4411 case OPC_BGEZL
: /* 0 >= 0 likely */
4412 case OPC_BLEZ
: /* 0 <= 0 */
4413 case OPC_BLEZL
: /* 0 <= 0 likely */
4415 ctx
->hflags
|= MIPS_HFLAG_B
;
4417 case OPC_BGEZAL
: /* 0 >= 0 */
4418 case OPC_BGEZALL
: /* 0 >= 0 likely */
4419 /* Always take and link */
4421 ctx
->hflags
|= MIPS_HFLAG_B
;
4423 case OPC_BNE
: /* rx != rx */
4424 case OPC_BGTZ
: /* 0 > 0 */
4425 case OPC_BLTZ
: /* 0 < 0 */
4428 case OPC_BLTZAL
: /* 0 < 0 */
4429 /* Handle as an unconditional branch to get correct delay
4432 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4433 ctx
->hflags
|= MIPS_HFLAG_B
;
4435 case OPC_BLTZALL
: /* 0 < 0 likely */
4436 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4437 /* Skip the instruction in the delay slot */
4438 ctx
->base
.pc_next
+= 4;
4440 case OPC_BNEL
: /* rx != rx likely */
4441 case OPC_BGTZL
: /* 0 > 0 likely */
4442 case OPC_BLTZL
: /* 0 < 0 likely */
4443 /* Skip the instruction in the delay slot */
4444 ctx
->base
.pc_next
+= 4;
4447 ctx
->hflags
|= MIPS_HFLAG_B
;
4450 ctx
->hflags
|= MIPS_HFLAG_BX
;
4454 ctx
->hflags
|= MIPS_HFLAG_B
;
4457 ctx
->hflags
|= MIPS_HFLAG_BR
;
4461 ctx
->hflags
|= MIPS_HFLAG_BR
;
4464 MIPS_INVAL("branch/jump");
4465 generate_exception_end(ctx
, EXCP_RI
);
4471 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4474 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4477 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4480 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4483 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4486 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4489 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4493 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4497 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4500 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4503 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4506 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4509 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4512 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4515 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4517 #if defined(TARGET_MIPS64)
4519 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4523 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4526 ctx
->hflags
|= MIPS_HFLAG_BC
;
4529 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4532 ctx
->hflags
|= MIPS_HFLAG_BL
;
4535 MIPS_INVAL("conditional branch/jump");
4536 generate_exception_end(ctx
, EXCP_RI
);
4541 ctx
->btarget
= btgt
;
4543 switch (delayslot_size
) {
4545 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4548 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4553 int post_delay
= insn_bytes
+ delayslot_size
;
4554 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4556 tcg_gen_movi_tl(cpu_gpr
[blink
],
4557 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
4561 if (insn_bytes
== 2)
4562 ctx
->hflags
|= MIPS_HFLAG_B16
;
4567 /* special3 bitfield operations */
4568 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4569 int rs
, int lsb
, int msb
)
4571 TCGv t0
= tcg_temp_new();
4572 TCGv t1
= tcg_temp_new();
4574 gen_load_gpr(t1
, rs
);
4577 if (lsb
+ msb
> 31) {
4581 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4583 /* The two checks together imply that lsb == 0,
4584 so this is a simple sign-extension. */
4585 tcg_gen_ext32s_tl(t0
, t1
);
4588 #if defined(TARGET_MIPS64)
4597 if (lsb
+ msb
> 63) {
4600 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4607 gen_load_gpr(t0
, rt
);
4608 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4609 tcg_gen_ext32s_tl(t0
, t0
);
4611 #if defined(TARGET_MIPS64)
4622 gen_load_gpr(t0
, rt
);
4623 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4628 MIPS_INVAL("bitops");
4629 generate_exception_end(ctx
, EXCP_RI
);
4634 gen_store_gpr(t0
, rt
);
4639 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4644 /* If no destination, treat it as a NOP. */
4648 t0
= tcg_temp_new();
4649 gen_load_gpr(t0
, rt
);
4653 TCGv t1
= tcg_temp_new();
4654 TCGv t2
= tcg_const_tl(0x00FF00FF);
4656 tcg_gen_shri_tl(t1
, t0
, 8);
4657 tcg_gen_and_tl(t1
, t1
, t2
);
4658 tcg_gen_and_tl(t0
, t0
, t2
);
4659 tcg_gen_shli_tl(t0
, t0
, 8);
4660 tcg_gen_or_tl(t0
, t0
, t1
);
4663 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4667 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4670 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4672 #if defined(TARGET_MIPS64)
4675 TCGv t1
= tcg_temp_new();
4676 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
4678 tcg_gen_shri_tl(t1
, t0
, 8);
4679 tcg_gen_and_tl(t1
, t1
, t2
);
4680 tcg_gen_and_tl(t0
, t0
, t2
);
4681 tcg_gen_shli_tl(t0
, t0
, 8);
4682 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4689 TCGv t1
= tcg_temp_new();
4690 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
4692 tcg_gen_shri_tl(t1
, t0
, 16);
4693 tcg_gen_and_tl(t1
, t1
, t2
);
4694 tcg_gen_and_tl(t0
, t0
, t2
);
4695 tcg_gen_shli_tl(t0
, t0
, 16);
4696 tcg_gen_or_tl(t0
, t0
, t1
);
4697 tcg_gen_shri_tl(t1
, t0
, 32);
4698 tcg_gen_shli_tl(t0
, t0
, 32);
4699 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4706 MIPS_INVAL("bsfhl");
4707 generate_exception_end(ctx
, EXCP_RI
);
4714 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4723 t0
= tcg_temp_new();
4724 t1
= tcg_temp_new();
4725 gen_load_gpr(t0
, rs
);
4726 gen_load_gpr(t1
, rt
);
4727 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4728 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4729 if (opc
== OPC_LSA
) {
4730 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4739 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4747 t0
= tcg_temp_new();
4748 gen_load_gpr(t0
, rt
);
4752 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4754 #if defined(TARGET_MIPS64)
4756 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4761 TCGv t1
= tcg_temp_new();
4762 gen_load_gpr(t1
, rs
);
4766 TCGv_i64 t2
= tcg_temp_new_i64();
4767 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4768 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4769 gen_move_low32(cpu_gpr
[rd
], t2
);
4770 tcg_temp_free_i64(t2
);
4773 #if defined(TARGET_MIPS64)
4775 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4776 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4777 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4787 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4794 t0
= tcg_temp_new();
4795 gen_load_gpr(t0
, rt
);
4798 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4800 #if defined(TARGET_MIPS64)
4802 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4809 #ifndef CONFIG_USER_ONLY
4810 /* CP0 (MMU and control) */
4811 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4813 TCGv_i64 t0
= tcg_temp_new_i64();
4814 TCGv_i64 t1
= tcg_temp_new_i64();
4816 tcg_gen_ext_tl_i64(t0
, arg
);
4817 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4818 #if defined(TARGET_MIPS64)
4819 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4821 tcg_gen_concat32_i64(t1
, t1
, t0
);
4823 tcg_gen_st_i64(t1
, cpu_env
, off
);
4824 tcg_temp_free_i64(t1
);
4825 tcg_temp_free_i64(t0
);
4828 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4830 TCGv_i64 t0
= tcg_temp_new_i64();
4831 TCGv_i64 t1
= tcg_temp_new_i64();
4833 tcg_gen_ext_tl_i64(t0
, arg
);
4834 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4835 tcg_gen_concat32_i64(t1
, t1
, t0
);
4836 tcg_gen_st_i64(t1
, cpu_env
, off
);
4837 tcg_temp_free_i64(t1
);
4838 tcg_temp_free_i64(t0
);
4841 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4843 TCGv_i64 t0
= tcg_temp_new_i64();
4845 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4846 #if defined(TARGET_MIPS64)
4847 tcg_gen_shri_i64(t0
, t0
, 30);
4849 tcg_gen_shri_i64(t0
, t0
, 32);
4851 gen_move_low32(arg
, t0
);
4852 tcg_temp_free_i64(t0
);
4855 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4857 TCGv_i64 t0
= tcg_temp_new_i64();
4859 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4860 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4861 gen_move_low32(arg
, t0
);
4862 tcg_temp_free_i64(t0
);
4865 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4867 TCGv_i32 t0
= tcg_temp_new_i32();
4869 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4870 tcg_gen_ext_i32_tl(arg
, t0
);
4871 tcg_temp_free_i32(t0
);
4874 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4876 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4877 tcg_gen_ext32s_tl(arg
, arg
);
4880 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4882 TCGv_i32 t0
= tcg_temp_new_i32();
4884 tcg_gen_trunc_tl_i32(t0
, arg
);
4885 tcg_gen_st_i32(t0
, cpu_env
, off
);
4886 tcg_temp_free_i32(t0
);
/*
 * Abort the current CP0 register access when condition @c does not
 * hold, by jumping to the function-local "cp0_unimplemented" label.
 * Only usable inside the gen_m{f,t}c0-family functions that define
 * that label.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4896 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4898 const char *rn
= "invalid";
4900 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4906 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4910 goto cp0_unimplemented
;
4916 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4920 goto cp0_unimplemented
;
4926 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4927 ctx
->CP0_LLAddr_shift
);
4931 CP0_CHECK(ctx
->mrp
);
4932 gen_helper_mfhc0_maar(arg
, cpu_env
);
4936 goto cp0_unimplemented
;
4945 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4949 goto cp0_unimplemented
;
4953 goto cp0_unimplemented
;
4955 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4959 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4960 tcg_gen_movi_tl(arg
, 0);
4963 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4965 const char *rn
= "invalid";
4966 uint64_t mask
= ctx
->PAMask
>> 36;
4968 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4974 tcg_gen_andi_tl(arg
, arg
, mask
);
4975 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4979 goto cp0_unimplemented
;
4985 tcg_gen_andi_tl(arg
, arg
, mask
);
4986 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4990 goto cp0_unimplemented
;
4996 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4997 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4998 relevant for modern MIPS cores supporting MTHC0, therefore
4999 treating MTHC0 to LLAddr as NOP. */
5003 CP0_CHECK(ctx
->mrp
);
5004 gen_helper_mthc0_maar(cpu_env
, arg
);
5008 goto cp0_unimplemented
;
5017 tcg_gen_andi_tl(arg
, arg
, mask
);
5018 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5022 goto cp0_unimplemented
;
5026 goto cp0_unimplemented
;
5028 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5031 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5034 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5036 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5037 tcg_gen_movi_tl(arg
, 0);
5039 tcg_gen_movi_tl(arg
, ~0);
5043 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5045 const char *rn
= "invalid";
5048 check_insn(ctx
, ISA_MIPS32
);
5054 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5058 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5059 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5063 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5064 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5068 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5069 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5074 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5078 goto cp0_unimplemented
;
5084 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5085 gen_helper_mfc0_random(arg
, cpu_env
);
5089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5105 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5110 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5114 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5115 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5116 rn
= "VPEScheFBack";
5119 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5120 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5124 goto cp0_unimplemented
;
5131 TCGv_i64 tmp
= tcg_temp_new_i64();
5132 tcg_gen_ld_i64(tmp
, cpu_env
,
5133 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5134 #if defined(TARGET_MIPS64)
5136 /* Move RI/XI fields to bits 31:30 */
5137 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5138 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5141 gen_move_low32(arg
, tmp
);
5142 tcg_temp_free_i64(tmp
);
5147 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5148 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5152 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5153 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5157 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5158 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5163 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5168 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5172 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5173 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5177 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5178 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5182 goto cp0_unimplemented
;
5189 TCGv_i64 tmp
= tcg_temp_new_i64();
5190 tcg_gen_ld_i64(tmp
, cpu_env
,
5191 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5192 #if defined(TARGET_MIPS64)
5194 /* Move RI/XI fields to bits 31:30 */
5195 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5196 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5199 gen_move_low32(arg
, tmp
);
5200 tcg_temp_free_i64(tmp
);
5206 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5207 rn
= "GlobalNumber";
5210 goto cp0_unimplemented
;
5216 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5217 tcg_gen_ext32s_tl(arg
, arg
);
5221 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5222 rn
= "ContextConfig";
5223 goto cp0_unimplemented
;
5225 CP0_CHECK(ctx
->ulri
);
5226 tcg_gen_ld_tl(arg
, cpu_env
,
5227 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5228 tcg_gen_ext32s_tl(arg
, arg
);
5232 goto cp0_unimplemented
;
5238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5242 check_insn(ctx
, ISA_MIPS32R2
);
5243 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5248 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5249 tcg_gen_ext32s_tl(arg
, arg
);
5254 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5255 tcg_gen_ext32s_tl(arg
, arg
);
5260 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5261 tcg_gen_ext32s_tl(arg
, arg
);
5265 goto cp0_unimplemented
;
5271 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5275 check_insn(ctx
, ISA_MIPS32R2
);
5276 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5280 check_insn(ctx
, ISA_MIPS32R2
);
5281 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5285 check_insn(ctx
, ISA_MIPS32R2
);
5286 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5290 check_insn(ctx
, ISA_MIPS32R2
);
5291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5295 check_insn(ctx
, ISA_MIPS32R2
);
5296 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5300 goto cp0_unimplemented
;
5306 check_insn(ctx
, ISA_MIPS32R2
);
5307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5311 goto cp0_unimplemented
;
5317 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5318 tcg_gen_ext32s_tl(arg
, arg
);
5323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5328 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5332 goto cp0_unimplemented
;
5338 /* Mark as an IO operation because we read the time. */
5339 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5342 gen_helper_mfc0_count(arg
, cpu_env
);
5343 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5346 /* Break the TB to be able to take timer interrupts immediately
5347 after reading count. DISAS_STOP isn't sufficient, we need to
5348 ensure we break completely out of translated code. */
5349 gen_save_pc(ctx
->base
.pc_next
+ 4);
5350 ctx
->base
.is_jmp
= DISAS_EXIT
;
5353 /* 6,7 are implementation dependent */
5355 goto cp0_unimplemented
;
5361 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5362 tcg_gen_ext32s_tl(arg
, arg
);
5366 goto cp0_unimplemented
;
5372 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5375 /* 6,7 are implementation dependent */
5377 goto cp0_unimplemented
;
5383 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5387 check_insn(ctx
, ISA_MIPS32R2
);
5388 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5392 check_insn(ctx
, ISA_MIPS32R2
);
5393 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5397 check_insn(ctx
, ISA_MIPS32R2
);
5398 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5402 goto cp0_unimplemented
;
5408 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5412 goto cp0_unimplemented
;
5418 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5419 tcg_gen_ext32s_tl(arg
, arg
);
5423 goto cp0_unimplemented
;
5429 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5433 check_insn(ctx
, ISA_MIPS32R2
);
5434 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5435 tcg_gen_ext32s_tl(arg
, arg
);
5439 check_insn(ctx
, ISA_MIPS32R2
);
5440 CP0_CHECK(ctx
->cmgcr
);
5441 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5442 tcg_gen_ext32s_tl(arg
, arg
);
5446 goto cp0_unimplemented
;
5452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5464 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5468 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5472 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5475 /* 6,7 are implementation dependent */
5477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5481 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5485 goto cp0_unimplemented
;
5491 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5495 CP0_CHECK(ctx
->mrp
);
5496 gen_helper_mfc0_maar(arg
, cpu_env
);
5500 CP0_CHECK(ctx
->mrp
);
5501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5505 goto cp0_unimplemented
;
5518 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5522 goto cp0_unimplemented
;
5535 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5539 goto cp0_unimplemented
;
5545 #if defined(TARGET_MIPS64)
5546 check_insn(ctx
, ISA_MIPS3
);
5547 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5548 tcg_gen_ext32s_tl(arg
, arg
);
5553 goto cp0_unimplemented
;
5557 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5558 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5561 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5565 goto cp0_unimplemented
;
5569 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5570 rn
= "'Diagnostic"; /* implementation dependent */
5575 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5579 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5580 rn
= "TraceControl";
5581 goto cp0_unimplemented
;
5583 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5584 rn
= "TraceControl2";
5585 goto cp0_unimplemented
;
5587 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5588 rn
= "UserTraceData";
5589 goto cp0_unimplemented
;
5591 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5593 goto cp0_unimplemented
;
5595 goto cp0_unimplemented
;
5602 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5603 tcg_gen_ext32s_tl(arg
, arg
);
5607 goto cp0_unimplemented
;
5613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5614 rn
= "Performance0";
5617 // gen_helper_mfc0_performance1(arg);
5618 rn
= "Performance1";
5619 goto cp0_unimplemented
;
5621 // gen_helper_mfc0_performance2(arg);
5622 rn
= "Performance2";
5623 goto cp0_unimplemented
;
5625 // gen_helper_mfc0_performance3(arg);
5626 rn
= "Performance3";
5627 goto cp0_unimplemented
;
5629 // gen_helper_mfc0_performance4(arg);
5630 rn
= "Performance4";
5631 goto cp0_unimplemented
;
5633 // gen_helper_mfc0_performance5(arg);
5634 rn
= "Performance5";
5635 goto cp0_unimplemented
;
5637 // gen_helper_mfc0_performance6(arg);
5638 rn
= "Performance6";
5639 goto cp0_unimplemented
;
5641 // gen_helper_mfc0_performance7(arg);
5642 rn
= "Performance7";
5643 goto cp0_unimplemented
;
5645 goto cp0_unimplemented
;
5651 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5655 goto cp0_unimplemented
;
5664 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5668 goto cp0_unimplemented
;
5678 TCGv_i64 tmp
= tcg_temp_new_i64();
5679 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5680 gen_move_low32(arg
, tmp
);
5681 tcg_temp_free_i64(tmp
);
5689 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5693 goto cp0_unimplemented
;
5702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5709 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5713 goto cp0_unimplemented
;
5719 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5720 tcg_gen_ext32s_tl(arg
, arg
);
5724 goto cp0_unimplemented
;
5731 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5740 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5741 tcg_gen_ld_tl(arg
, cpu_env
,
5742 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5743 tcg_gen_ext32s_tl(arg
, arg
);
5747 goto cp0_unimplemented
;
5751 goto cp0_unimplemented
;
5753 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5757 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5758 gen_mfc0_unimplemented(ctx
, arg
);
5761 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5763 const char *rn
= "invalid";
5766 check_insn(ctx
, ISA_MIPS32
);
5768 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5776 gen_helper_mtc0_index(cpu_env
, arg
);
5780 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5781 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5785 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5790 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5800 goto cp0_unimplemented
;
5810 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5811 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5815 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5816 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5820 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5821 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5825 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5826 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5830 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5831 tcg_gen_st_tl(arg
, cpu_env
,
5832 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5836 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5837 tcg_gen_st_tl(arg
, cpu_env
,
5838 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5839 rn
= "VPEScheFBack";
5842 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5843 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5847 goto cp0_unimplemented
;
5853 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5857 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5858 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5863 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5868 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5873 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5878 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5883 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5887 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5888 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5892 goto cp0_unimplemented
;
5898 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5904 rn
= "GlobalNumber";
5907 goto cp0_unimplemented
;
5913 gen_helper_mtc0_context(cpu_env
, arg
);
5917 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5918 rn
= "ContextConfig";
5919 goto cp0_unimplemented
;
5921 CP0_CHECK(ctx
->ulri
);
5922 tcg_gen_st_tl(arg
, cpu_env
,
5923 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5927 goto cp0_unimplemented
;
5933 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5937 check_insn(ctx
, ISA_MIPS32R2
);
5938 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5940 ctx
->base
.is_jmp
= DISAS_STOP
;
5944 gen_helper_mtc0_segctl0(cpu_env
, arg
);
5949 gen_helper_mtc0_segctl1(cpu_env
, arg
);
5954 gen_helper_mtc0_segctl2(cpu_env
, arg
);
5958 goto cp0_unimplemented
;
5964 gen_helper_mtc0_wired(cpu_env
, arg
);
5968 check_insn(ctx
, ISA_MIPS32R2
);
5969 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5973 check_insn(ctx
, ISA_MIPS32R2
);
5974 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5978 check_insn(ctx
, ISA_MIPS32R2
);
5979 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5983 check_insn(ctx
, ISA_MIPS32R2
);
5984 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5988 check_insn(ctx
, ISA_MIPS32R2
);
5989 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5993 goto cp0_unimplemented
;
5999 check_insn(ctx
, ISA_MIPS32R2
);
6000 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6001 ctx
->base
.is_jmp
= DISAS_STOP
;
6005 goto cp0_unimplemented
;
6023 goto cp0_unimplemented
;
6029 gen_helper_mtc0_count(cpu_env
, arg
);
6032 /* 6,7 are implementation dependent */
6034 goto cp0_unimplemented
;
6040 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6044 goto cp0_unimplemented
;
6050 gen_helper_mtc0_compare(cpu_env
, arg
);
6053 /* 6,7 are implementation dependent */
6055 goto cp0_unimplemented
;
6061 save_cpu_state(ctx
, 1);
6062 gen_helper_mtc0_status(cpu_env
, arg
);
6063 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6064 gen_save_pc(ctx
->base
.pc_next
+ 4);
6065 ctx
->base
.is_jmp
= DISAS_EXIT
;
6069 check_insn(ctx
, ISA_MIPS32R2
);
6070 gen_helper_mtc0_intctl(cpu_env
, arg
);
6071 /* Stop translation as we may have switched the execution mode */
6072 ctx
->base
.is_jmp
= DISAS_STOP
;
6076 check_insn(ctx
, ISA_MIPS32R2
);
6077 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6078 /* Stop translation as we may have switched the execution mode */
6079 ctx
->base
.is_jmp
= DISAS_STOP
;
6083 check_insn(ctx
, ISA_MIPS32R2
);
6084 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6085 /* Stop translation as we may have switched the execution mode */
6086 ctx
->base
.is_jmp
= DISAS_STOP
;
6090 goto cp0_unimplemented
;
6096 save_cpu_state(ctx
, 1);
6097 gen_helper_mtc0_cause(cpu_env
, arg
);
6098 /* Stop translation as we may have triggered an interrupt.
6099 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6100 * translated code to check for pending interrupts. */
6101 gen_save_pc(ctx
->base
.pc_next
+ 4);
6102 ctx
->base
.is_jmp
= DISAS_EXIT
;
6106 goto cp0_unimplemented
;
6112 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6116 goto cp0_unimplemented
;
6126 check_insn(ctx
, ISA_MIPS32R2
);
6127 gen_helper_mtc0_ebase(cpu_env
, arg
);
6131 goto cp0_unimplemented
;
6137 gen_helper_mtc0_config0(cpu_env
, arg
);
6139 /* Stop translation as we may have switched the execution mode */
6140 ctx
->base
.is_jmp
= DISAS_STOP
;
6143 /* ignored, read only */
6147 gen_helper_mtc0_config2(cpu_env
, arg
);
6149 /* Stop translation as we may have switched the execution mode */
6150 ctx
->base
.is_jmp
= DISAS_STOP
;
6153 gen_helper_mtc0_config3(cpu_env
, arg
);
6155 /* Stop translation as we may have switched the execution mode */
6156 ctx
->base
.is_jmp
= DISAS_STOP
;
6159 gen_helper_mtc0_config4(cpu_env
, arg
);
6161 ctx
->base
.is_jmp
= DISAS_STOP
;
6164 gen_helper_mtc0_config5(cpu_env
, arg
);
6166 /* Stop translation as we may have switched the execution mode */
6167 ctx
->base
.is_jmp
= DISAS_STOP
;
6169 /* 6,7 are implementation dependent */
6179 rn
= "Invalid config selector";
6180 goto cp0_unimplemented
;
6186 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6190 CP0_CHECK(ctx
->mrp
);
6191 gen_helper_mtc0_maar(cpu_env
, arg
);
6195 CP0_CHECK(ctx
->mrp
);
6196 gen_helper_mtc0_maari(cpu_env
, arg
);
6200 goto cp0_unimplemented
;
6213 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6217 goto cp0_unimplemented
;
6230 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6234 goto cp0_unimplemented
;
6240 #if defined(TARGET_MIPS64)
6241 check_insn(ctx
, ISA_MIPS3
);
6242 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6247 goto cp0_unimplemented
;
6251 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6252 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6255 gen_helper_mtc0_framemask(cpu_env
, arg
);
6259 goto cp0_unimplemented
;
6264 rn
= "Diagnostic"; /* implementation dependent */
6269 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6270 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6271 gen_save_pc(ctx
->base
.pc_next
+ 4);
6272 ctx
->base
.is_jmp
= DISAS_EXIT
;
6276 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6277 rn
= "TraceControl";
6278 /* Stop translation as we may have switched the execution mode */
6279 ctx
->base
.is_jmp
= DISAS_STOP
;
6280 goto cp0_unimplemented
;
6282 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6283 rn
= "TraceControl2";
6284 /* Stop translation as we may have switched the execution mode */
6285 ctx
->base
.is_jmp
= DISAS_STOP
;
6286 goto cp0_unimplemented
;
6288 /* Stop translation as we may have switched the execution mode */
6289 ctx
->base
.is_jmp
= DISAS_STOP
;
6290 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6291 rn
= "UserTraceData";
6292 /* Stop translation as we may have switched the execution mode */
6293 ctx
->base
.is_jmp
= DISAS_STOP
;
6294 goto cp0_unimplemented
;
6296 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6297 /* Stop translation as we may have switched the execution mode */
6298 ctx
->base
.is_jmp
= DISAS_STOP
;
6300 goto cp0_unimplemented
;
6302 goto cp0_unimplemented
;
6309 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6313 goto cp0_unimplemented
;
6319 gen_helper_mtc0_performance0(cpu_env
, arg
);
6320 rn
= "Performance0";
6323 // gen_helper_mtc0_performance1(arg);
6324 rn
= "Performance1";
6325 goto cp0_unimplemented
;
6327 // gen_helper_mtc0_performance2(arg);
6328 rn
= "Performance2";
6329 goto cp0_unimplemented
;
6331 // gen_helper_mtc0_performance3(arg);
6332 rn
= "Performance3";
6333 goto cp0_unimplemented
;
6335 // gen_helper_mtc0_performance4(arg);
6336 rn
= "Performance4";
6337 goto cp0_unimplemented
;
6339 // gen_helper_mtc0_performance5(arg);
6340 rn
= "Performance5";
6341 goto cp0_unimplemented
;
6343 // gen_helper_mtc0_performance6(arg);
6344 rn
= "Performance6";
6345 goto cp0_unimplemented
;
6347 // gen_helper_mtc0_performance7(arg);
6348 rn
= "Performance7";
6349 goto cp0_unimplemented
;
6351 goto cp0_unimplemented
;
6357 gen_helper_mtc0_errctl(cpu_env
, arg
);
6358 ctx
->base
.is_jmp
= DISAS_STOP
;
6362 goto cp0_unimplemented
;
6375 goto cp0_unimplemented
;
6384 gen_helper_mtc0_taglo(cpu_env
, arg
);
6391 gen_helper_mtc0_datalo(cpu_env
, arg
);
6395 goto cp0_unimplemented
;
6404 gen_helper_mtc0_taghi(cpu_env
, arg
);
6411 gen_helper_mtc0_datahi(cpu_env
, arg
);
6416 goto cp0_unimplemented
;
6422 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6426 goto cp0_unimplemented
;
6433 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6442 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6443 tcg_gen_st_tl(arg
, cpu_env
,
6444 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6448 goto cp0_unimplemented
;
6452 goto cp0_unimplemented
;
6454 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6456 /* For simplicity assume that all writes can cause interrupts. */
6457 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6459 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6460 * translated code to check for pending interrupts. */
6461 gen_save_pc(ctx
->base
.pc_next
+ 4);
6462 ctx
->base
.is_jmp
= DISAS_EXIT
;
6467 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6470 #if defined(TARGET_MIPS64)
6471 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6473 const char *rn
= "invalid";
6476 check_insn(ctx
, ISA_MIPS64
);
6482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6486 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6487 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6492 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6496 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6497 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6506 goto cp0_unimplemented
;
6512 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6513 gen_helper_mfc0_random(arg
, cpu_env
);
6517 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6518 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6522 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6527 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6528 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6532 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6533 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6537 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6538 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6542 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6543 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6544 rn
= "VPEScheFBack";
6547 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6552 goto cp0_unimplemented
;
6558 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6562 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6563 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6567 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6568 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6572 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6573 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6577 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6578 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6582 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6583 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6587 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6588 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6592 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6593 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6597 goto cp0_unimplemented
;
6603 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6609 rn
= "GlobalNumber";
6612 goto cp0_unimplemented
;
6618 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6622 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6623 rn
= "ContextConfig";
6624 goto cp0_unimplemented
;
6626 CP0_CHECK(ctx
->ulri
);
6627 tcg_gen_ld_tl(arg
, cpu_env
,
6628 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6632 goto cp0_unimplemented
;
6638 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6642 check_insn(ctx
, ISA_MIPS32R2
);
6643 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6648 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6653 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6658 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6662 goto cp0_unimplemented
;
6668 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6672 check_insn(ctx
, ISA_MIPS32R2
);
6673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6677 check_insn(ctx
, ISA_MIPS32R2
);
6678 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6682 check_insn(ctx
, ISA_MIPS32R2
);
6683 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6687 check_insn(ctx
, ISA_MIPS32R2
);
6688 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6692 check_insn(ctx
, ISA_MIPS32R2
);
6693 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6697 goto cp0_unimplemented
;
6703 check_insn(ctx
, ISA_MIPS32R2
);
6704 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6708 goto cp0_unimplemented
;
6714 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6719 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6724 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6728 goto cp0_unimplemented
;
6734 /* Mark as an IO operation because we read the time. */
6735 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6738 gen_helper_mfc0_count(arg
, cpu_env
);
6739 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6742 /* Break the TB to be able to take timer interrupts immediately
6743 after reading count. DISAS_STOP isn't sufficient, we need to
6744 ensure we break completely out of translated code. */
6745 gen_save_pc(ctx
->base
.pc_next
+ 4);
6746 ctx
->base
.is_jmp
= DISAS_EXIT
;
6749 /* 6,7 are implementation dependent */
6751 goto cp0_unimplemented
;
6757 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6761 goto cp0_unimplemented
;
6767 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6770 /* 6,7 are implementation dependent */
6772 goto cp0_unimplemented
;
6778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6782 check_insn(ctx
, ISA_MIPS32R2
);
6783 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6787 check_insn(ctx
, ISA_MIPS32R2
);
6788 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6792 check_insn(ctx
, ISA_MIPS32R2
);
6793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6797 goto cp0_unimplemented
;
6803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6807 goto cp0_unimplemented
;
6813 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6817 goto cp0_unimplemented
;
6823 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6827 check_insn(ctx
, ISA_MIPS32R2
);
6828 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6832 check_insn(ctx
, ISA_MIPS32R2
);
6833 CP0_CHECK(ctx
->cmgcr
);
6834 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6838 goto cp0_unimplemented
;
6844 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6848 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6852 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6856 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6860 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6864 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6867 /* 6,7 are implementation dependent */
6869 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6873 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6877 goto cp0_unimplemented
;
6883 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6887 CP0_CHECK(ctx
->mrp
);
6888 gen_helper_dmfc0_maar(arg
, cpu_env
);
6892 CP0_CHECK(ctx
->mrp
);
6893 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6897 goto cp0_unimplemented
;
6910 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6914 goto cp0_unimplemented
;
6927 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6931 goto cp0_unimplemented
;
6937 check_insn(ctx
, ISA_MIPS3
);
6938 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6942 goto cp0_unimplemented
;
6946 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6947 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6950 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6954 goto cp0_unimplemented
;
6958 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6959 rn
= "'Diagnostic"; /* implementation dependent */
6964 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6968 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6969 rn
= "TraceControl";
6970 goto cp0_unimplemented
;
6972 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6973 rn
= "TraceControl2";
6974 goto cp0_unimplemented
;
6976 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6977 rn
= "UserTraceData";
6978 goto cp0_unimplemented
;
6980 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6982 goto cp0_unimplemented
;
6984 goto cp0_unimplemented
;
6991 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6995 goto cp0_unimplemented
;
7001 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7002 rn
= "Performance0";
7005 // gen_helper_dmfc0_performance1(arg);
7006 rn
= "Performance1";
7007 goto cp0_unimplemented
;
7009 // gen_helper_dmfc0_performance2(arg);
7010 rn
= "Performance2";
7011 goto cp0_unimplemented
;
7013 // gen_helper_dmfc0_performance3(arg);
7014 rn
= "Performance3";
7015 goto cp0_unimplemented
;
7017 // gen_helper_dmfc0_performance4(arg);
7018 rn
= "Performance4";
7019 goto cp0_unimplemented
;
7021 // gen_helper_dmfc0_performance5(arg);
7022 rn
= "Performance5";
7023 goto cp0_unimplemented
;
7025 // gen_helper_dmfc0_performance6(arg);
7026 rn
= "Performance6";
7027 goto cp0_unimplemented
;
7029 // gen_helper_dmfc0_performance7(arg);
7030 rn
= "Performance7";
7031 goto cp0_unimplemented
;
7033 goto cp0_unimplemented
;
7039 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7043 goto cp0_unimplemented
;
7053 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7057 goto cp0_unimplemented
;
7066 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7073 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7077 goto cp0_unimplemented
;
7086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7093 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7097 goto cp0_unimplemented
;
7103 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7107 goto cp0_unimplemented
;
7114 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7123 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7124 tcg_gen_ld_tl(arg
, cpu_env
,
7125 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7129 goto cp0_unimplemented
;
7133 goto cp0_unimplemented
;
7135 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
7139 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7140 gen_mfc0_unimplemented(ctx
, arg
);
7143 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7145 const char *rn
= "invalid";
7148 check_insn(ctx
, ISA_MIPS64
);
7150 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7158 gen_helper_mtc0_index(cpu_env
, arg
);
7162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7163 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7172 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7182 goto cp0_unimplemented
;
7192 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7193 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7197 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7198 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7202 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7203 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7207 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7208 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7212 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7213 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7217 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7218 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7219 rn
= "VPEScheFBack";
7222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7223 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7227 goto cp0_unimplemented
;
7233 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7237 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7238 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7242 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7243 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7247 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7248 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7252 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7253 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7257 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7258 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7262 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7263 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7268 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7272 goto cp0_unimplemented
;
7278 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7284 rn
= "GlobalNumber";
7287 goto cp0_unimplemented
;
7293 gen_helper_mtc0_context(cpu_env
, arg
);
7297 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7298 rn
= "ContextConfig";
7299 goto cp0_unimplemented
;
7301 CP0_CHECK(ctx
->ulri
);
7302 tcg_gen_st_tl(arg
, cpu_env
,
7303 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7307 goto cp0_unimplemented
;
7313 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7317 check_insn(ctx
, ISA_MIPS32R2
);
7318 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7323 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7328 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7333 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7337 goto cp0_unimplemented
;
7343 gen_helper_mtc0_wired(cpu_env
, arg
);
7347 check_insn(ctx
, ISA_MIPS32R2
);
7348 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7352 check_insn(ctx
, ISA_MIPS32R2
);
7353 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7357 check_insn(ctx
, ISA_MIPS32R2
);
7358 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7362 check_insn(ctx
, ISA_MIPS32R2
);
7363 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7367 check_insn(ctx
, ISA_MIPS32R2
);
7368 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7372 goto cp0_unimplemented
;
7378 check_insn(ctx
, ISA_MIPS32R2
);
7379 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7380 ctx
->base
.is_jmp
= DISAS_STOP
;
7384 goto cp0_unimplemented
;
7402 goto cp0_unimplemented
;
7408 gen_helper_mtc0_count(cpu_env
, arg
);
7411 /* 6,7 are implementation dependent */
7413 goto cp0_unimplemented
;
7415 /* Stop translation as we may have switched the execution mode */
7416 ctx
->base
.is_jmp
= DISAS_STOP
;
7421 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7425 goto cp0_unimplemented
;
7431 gen_helper_mtc0_compare(cpu_env
, arg
);
7434 /* 6,7 are implementation dependent */
7436 goto cp0_unimplemented
;
7438 /* Stop translation as we may have switched the execution mode */
7439 ctx
->base
.is_jmp
= DISAS_STOP
;
7444 save_cpu_state(ctx
, 1);
7445 gen_helper_mtc0_status(cpu_env
, arg
);
7446 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7447 gen_save_pc(ctx
->base
.pc_next
+ 4);
7448 ctx
->base
.is_jmp
= DISAS_EXIT
;
7452 check_insn(ctx
, ISA_MIPS32R2
);
7453 gen_helper_mtc0_intctl(cpu_env
, arg
);
7454 /* Stop translation as we may have switched the execution mode */
7455 ctx
->base
.is_jmp
= DISAS_STOP
;
7459 check_insn(ctx
, ISA_MIPS32R2
);
7460 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7461 /* Stop translation as we may have switched the execution mode */
7462 ctx
->base
.is_jmp
= DISAS_STOP
;
7466 check_insn(ctx
, ISA_MIPS32R2
);
7467 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7468 /* Stop translation as we may have switched the execution mode */
7469 ctx
->base
.is_jmp
= DISAS_STOP
;
7473 goto cp0_unimplemented
;
7479 save_cpu_state(ctx
, 1);
7480 gen_helper_mtc0_cause(cpu_env
, arg
);
7481 /* Stop translation as we may have triggered an interrupt.
7482 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7483 * translated code to check for pending interrupts. */
7484 gen_save_pc(ctx
->base
.pc_next
+ 4);
7485 ctx
->base
.is_jmp
= DISAS_EXIT
;
7489 goto cp0_unimplemented
;
7495 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7499 goto cp0_unimplemented
;
7509 check_insn(ctx
, ISA_MIPS32R2
);
7510 gen_helper_mtc0_ebase(cpu_env
, arg
);
7514 goto cp0_unimplemented
;
7520 gen_helper_mtc0_config0(cpu_env
, arg
);
7522 /* Stop translation as we may have switched the execution mode */
7523 ctx
->base
.is_jmp
= DISAS_STOP
;
7526 /* ignored, read only */
7530 gen_helper_mtc0_config2(cpu_env
, arg
);
7532 /* Stop translation as we may have switched the execution mode */
7533 ctx
->base
.is_jmp
= DISAS_STOP
;
7536 gen_helper_mtc0_config3(cpu_env
, arg
);
7538 /* Stop translation as we may have switched the execution mode */
7539 ctx
->base
.is_jmp
= DISAS_STOP
;
7542 /* currently ignored */
7546 gen_helper_mtc0_config5(cpu_env
, arg
);
7548 /* Stop translation as we may have switched the execution mode */
7549 ctx
->base
.is_jmp
= DISAS_STOP
;
7551 /* 6,7 are implementation dependent */
7553 rn
= "Invalid config selector";
7554 goto cp0_unimplemented
;
7560 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7564 CP0_CHECK(ctx
->mrp
);
7565 gen_helper_mtc0_maar(cpu_env
, arg
);
7569 CP0_CHECK(ctx
->mrp
);
7570 gen_helper_mtc0_maari(cpu_env
, arg
);
7574 goto cp0_unimplemented
;
7587 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7591 goto cp0_unimplemented
;
7604 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7608 goto cp0_unimplemented
;
7614 check_insn(ctx
, ISA_MIPS3
);
7615 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7619 goto cp0_unimplemented
;
7623 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7624 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7627 gen_helper_mtc0_framemask(cpu_env
, arg
);
7631 goto cp0_unimplemented
;
7636 rn
= "Diagnostic"; /* implementation dependent */
7641 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7642 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7643 gen_save_pc(ctx
->base
.pc_next
+ 4);
7644 ctx
->base
.is_jmp
= DISAS_EXIT
;
7648 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7649 /* Stop translation as we may have switched the execution mode */
7650 ctx
->base
.is_jmp
= DISAS_STOP
;
7651 rn
= "TraceControl";
7652 goto cp0_unimplemented
;
7654 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7655 /* Stop translation as we may have switched the execution mode */
7656 ctx
->base
.is_jmp
= DISAS_STOP
;
7657 rn
= "TraceControl2";
7658 goto cp0_unimplemented
;
7660 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7661 /* Stop translation as we may have switched the execution mode */
7662 ctx
->base
.is_jmp
= DISAS_STOP
;
7663 rn
= "UserTraceData";
7664 goto cp0_unimplemented
;
7666 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7667 /* Stop translation as we may have switched the execution mode */
7668 ctx
->base
.is_jmp
= DISAS_STOP
;
7670 goto cp0_unimplemented
;
7672 goto cp0_unimplemented
;
7679 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7683 goto cp0_unimplemented
;
7689 gen_helper_mtc0_performance0(cpu_env
, arg
);
7690 rn
= "Performance0";
7693 // gen_helper_mtc0_performance1(cpu_env, arg);
7694 rn
= "Performance1";
7695 goto cp0_unimplemented
;
7697 // gen_helper_mtc0_performance2(cpu_env, arg);
7698 rn
= "Performance2";
7699 goto cp0_unimplemented
;
7701 // gen_helper_mtc0_performance3(cpu_env, arg);
7702 rn
= "Performance3";
7703 goto cp0_unimplemented
;
7705 // gen_helper_mtc0_performance4(cpu_env, arg);
7706 rn
= "Performance4";
7707 goto cp0_unimplemented
;
7709 // gen_helper_mtc0_performance5(cpu_env, arg);
7710 rn
= "Performance5";
7711 goto cp0_unimplemented
;
7713 // gen_helper_mtc0_performance6(cpu_env, arg);
7714 rn
= "Performance6";
7715 goto cp0_unimplemented
;
7717 // gen_helper_mtc0_performance7(cpu_env, arg);
7718 rn
= "Performance7";
7719 goto cp0_unimplemented
;
7721 goto cp0_unimplemented
;
7727 gen_helper_mtc0_errctl(cpu_env
, arg
);
7728 ctx
->base
.is_jmp
= DISAS_STOP
;
7732 goto cp0_unimplemented
;
7745 goto cp0_unimplemented
;
7754 gen_helper_mtc0_taglo(cpu_env
, arg
);
7761 gen_helper_mtc0_datalo(cpu_env
, arg
);
7765 goto cp0_unimplemented
;
7774 gen_helper_mtc0_taghi(cpu_env
, arg
);
7781 gen_helper_mtc0_datahi(cpu_env
, arg
);
7786 goto cp0_unimplemented
;
7792 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7796 goto cp0_unimplemented
;
7803 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7812 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7813 tcg_gen_st_tl(arg
, cpu_env
,
7814 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7818 goto cp0_unimplemented
;
7822 goto cp0_unimplemented
;
7824 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
7826 /* For simplicity assume that all writes can cause interrupts. */
7827 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7829 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7830 * translated code to check for pending interrupts. */
7831 gen_save_pc(ctx
->base
.pc_next
+ 4);
7832 ctx
->base
.is_jmp
= DISAS_EXIT
;
7837 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7839 #endif /* TARGET_MIPS64 */
/*
 * gen_mftr: emit TCG ops for the MT-ASE MFTR instruction — read a register
 * belonging to another thread context (TC) and store the result in gpr rd.
 * u/sel/h select the register bank (CP0 vs GPR/LO/HI/ACX/DSP/FPU) within
 * the target TC.  If the target TC is on another VPE or out of range, the
 * result is forced to -1.
 *
 * NOTE(review): lossy extraction — switch/case labels, braces and break
 * statements are missing and original line numbers are fused into the
 * text.  Only comments were added; restore structure from upstream.
 */
7841 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7842 int u
, int sel
, int h
)
/* Target TC index comes from VPEControl.TargTC. */
7844 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7845 TCGv t0
= tcg_temp_local_new();
/* Accessing a TC bound to a different VPE (without MVP) or beyond the
 * number of provisioned TCs yields -1. */
7847 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7848 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7849 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7850 tcg_gen_movi_tl(t0
, -1);
7851 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7852 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7853 tcg_gen_movi_tl(t0
, -1);
/* --- CP0 registers of the target TC, via mftc0_* helpers; registers
 * with no cross-TC semantics fall back to plain gen_mfc0(). --- */
7859 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7862 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7872 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7875 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7878 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7881 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7884 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7887 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7890 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7893 gen_mfc0(ctx
, t0
, rt
, sel
);
7900 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7903 gen_mfc0(ctx
, t0
, rt
, sel
);
7909 gen_helper_mftc0_status(t0
, cpu_env
);
7912 gen_mfc0(ctx
, t0
, rt
, sel
);
7918 gen_helper_mftc0_cause(t0
, cpu_env
);
7928 gen_helper_mftc0_epc(t0
, cpu_env
);
7938 gen_helper_mftc0_ebase(t0
, cpu_env
);
7955 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7965 gen_helper_mftc0_debug(t0
, cpu_env
);
7968 gen_mfc0(ctx
, t0
, rt
, sel
);
7973 gen_mfc0(ctx
, t0
, rt
, sel
);
/* --- Non-CP0 banks (u set), selected by sel --- */
7975 } else switch (sel
) {
7976 /* GPR registers. */
7978 gen_helper_1e0i(mftgpr
, t0
, rt
);
7980 /* Auxiliary CPU registers */
7984 gen_helper_1e0i(mftlo
, t0
, 0);
7987 gen_helper_1e0i(mfthi
, t0
, 0);
7990 gen_helper_1e0i(mftacx
, t0
, 0);
7993 gen_helper_1e0i(mftlo
, t0
, 1);
7996 gen_helper_1e0i(mfthi
, t0
, 1);
7999 gen_helper_1e0i(mftacx
, t0
, 1);
8002 gen_helper_1e0i(mftlo
, t0
, 2);
8005 gen_helper_1e0i(mfthi
, t0
, 2);
8008 gen_helper_1e0i(mftacx
, t0
, 2);
8011 gen_helper_1e0i(mftlo
, t0
, 3);
8014 gen_helper_1e0i(mfthi
, t0
, 3);
8017 gen_helper_1e0i(mftacx
, t0
, 3);
8020 gen_helper_mftdsp(t0
, cpu_env
);
8026 /* Floating point (COP1). */
8028 /* XXX: For now we support only a single FPU context. */
/* h selects low (gen_load_fpr32) vs high (gen_load_fpr32h) FPR half. */
8030 TCGv_i32 fp0
= tcg_temp_new_i32();
8032 gen_load_fpr32(ctx
, fp0
, rt
);
8033 tcg_gen_ext_i32_tl(t0
, fp0
);
8034 tcg_temp_free_i32(fp0
);
8036 TCGv_i32 fp0
= tcg_temp_new_i32();
8038 gen_load_fpr32h(ctx
, fp0
, rt
);
8039 tcg_gen_ext_i32_tl(t0
, fp0
);
8040 tcg_temp_free_i32(fp0
);
8044 /* XXX: For now we support only a single FPU context. */
8045 gen_helper_1e0i(cfc1
, t0
, rt
);
8047 /* COP2: Not implemented. */
8054 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
8055 gen_store_gpr(t0
, rd
);
/* Invalid encodings log and raise a Reserved Instruction exception. */
8061 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
8062 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_mttr: emit TCG ops for the MT-ASE MTTR instruction — write gpr rt's
 * value into a register of another thread context (TC).  Mirror image of
 * gen_mftr: u/sel/h select the destination bank (CP0 vs GPR/LO/HI/ACX/
 * DSP/FPU) within the target TC.
 *
 * NOTE(review): lossy extraction — switch/case labels, braces, break
 * statements and some statement bodies (e.g. the two "/\* NOP *\/" arms of
 * the VPE-validity checks) are missing, and original line numbers are
 * fused into the text.  Only comments were added; restore from upstream.
 */
8065 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
8066 int u
, int sel
, int h
)
8068 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8069 TCGv t0
= tcg_temp_local_new();
8071 gen_load_gpr(t0
, rt
);
/* Writes to a TC on another VPE (without MVP) or out of range are
 * ignored — the matching NOP arms are missing from this listing. */
8072 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8073 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8074 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
8076 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8077 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
/* --- CP0 registers of the target TC, via mttc0_* helpers; registers
 * with no cross-TC semantics fall back to plain gen_mtc0(). --- */
8084 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
8087 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
8097 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
8100 gen_helper_mttc0_tcbind(cpu_env
, t0
);
8103 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8106 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8109 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8112 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8115 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8118 gen_mtc0(ctx
, t0
, rd
, sel
);
8125 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8128 gen_mtc0(ctx
, t0
, rd
, sel
);
8134 gen_helper_mttc0_status(cpu_env
, t0
);
8137 gen_mtc0(ctx
, t0
, rd
, sel
);
8143 gen_helper_mttc0_cause(cpu_env
, t0
);
8153 gen_helper_mttc0_ebase(cpu_env
, t0
);
8163 gen_helper_mttc0_debug(cpu_env
, t0
);
8166 gen_mtc0(ctx
, t0
, rd
, sel
);
8171 gen_mtc0(ctx
, t0
, rd
, sel
);
/* --- Non-CP0 banks (u set), selected by sel --- */
8173 } else switch (sel
) {
8174 /* GPR registers. */
8176 gen_helper_0e1i(mttgpr
, t0
, rd
);
8178 /* Auxiliary CPU registers */
8182 gen_helper_0e1i(mttlo
, t0
, 0);
8185 gen_helper_0e1i(mtthi
, t0
, 0);
8188 gen_helper_0e1i(mttacx
, t0
, 0);
8191 gen_helper_0e1i(mttlo
, t0
, 1);
8194 gen_helper_0e1i(mtthi
, t0
, 1);
8197 gen_helper_0e1i(mttacx
, t0
, 1);
8200 gen_helper_0e1i(mttlo
, t0
, 2);
8203 gen_helper_0e1i(mtthi
, t0
, 2);
8206 gen_helper_0e1i(mttacx
, t0
, 2);
8209 gen_helper_0e1i(mttlo
, t0
, 3);
8212 gen_helper_0e1i(mtthi
, t0
, 3);
8215 gen_helper_0e1i(mttacx
, t0
, 3);
8218 gen_helper_mttdsp(cpu_env
, t0
);
8224 /* Floating point (COP1). */
8226 /* XXX: For now we support only a single FPU context. */
/* h selects low (gen_store_fpr32) vs high (gen_store_fpr32h) FPR half. */
8228 TCGv_i32 fp0
= tcg_temp_new_i32();
8230 tcg_gen_trunc_tl_i32(fp0
, t0
);
8231 gen_store_fpr32(ctx
, fp0
, rd
);
8232 tcg_temp_free_i32(fp0
);
8234 TCGv_i32 fp0
= tcg_temp_new_i32();
8236 tcg_gen_trunc_tl_i32(fp0
, t0
);
8237 gen_store_fpr32h(ctx
, fp0
, rd
);
8238 tcg_temp_free_i32(fp0
);
8242 /* XXX: For now we support only a single FPU context. */
8244 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8246 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8247 tcg_temp_free_i32(fs_tmp
);
8249 /* Stop translation as we may have changed hflags */
8250 ctx
->base
.is_jmp
= DISAS_STOP
;
8252 /* COP2: Not implemented. */
8259 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
/* Invalid encodings log and raise a Reserved Instruction exception. */
8265 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8266 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_cp0: top-level decoder for COP0 instructions — mfc0/mtc0 (and the
 * 64-bit / high-half / cross-TC variants under TARGET_MIPS64 and the MT
 * ASE), the TLB maintenance ops (tlbwi/tlbinv/tlbinvf/tlbwr/tlbp/tlbr,
 * each gated on the CPU model providing the helper), and eret/deret/wait.
 *
 * NOTE(review): lossy extraction — the outer `switch (opc)` and most case
 * labels/breaks are missing, and original line numbers are fused into the
 * text.  Only comments were added; restore structure from upstream.
 */
8269 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8271 const char *opn
= "ldst";
/* All CP0 ops require coprocessor 0 access (kernel mode or Status.CU0). */
8273 check_cp0_enabled(ctx
);
/* MFC0: read CP0[rd, sel] into gpr rt; sel is opcode bits 2..0. */
8280 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
/* MTC0: write gpr rt into CP0[rd, sel]. */
8285 TCGv t0
= tcg_temp_new();
8287 gen_load_gpr(t0
, rt
);
8288 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8293 #if defined(TARGET_MIPS64)
/* DMFC0/DMTC0: 64-bit variants, MIPS III and up. */
8295 check_insn(ctx
, ISA_MIPS3
);
8300 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8304 check_insn(ctx
, ISA_MIPS3
);
8306 TCGv t0
= tcg_temp_new();
8308 gen_load_gpr(t0
, rt
);
8309 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFHC0/MTHC0: access the upper half of extended CP0 registers. */
8321 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8327 TCGv t0
= tcg_temp_new();
8328 gen_load_gpr(t0
, rt
);
8329 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFTR/MTTR (MT ASE): cross-thread-context register moves;
 * u = bit 5, sel = bits 2..0, h = bit 4 of the opcode. */
8335 check_insn(ctx
, ASE_MT
);
8340 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8341 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8345 check_insn(ctx
, ASE_MT
);
8346 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8347 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
/* TLB ops: each requires the CPU model to supply the helper. */
8352 if (!env
->tlb
->helper_tlbwi
)
8354 gen_helper_tlbwi(cpu_env
);
8359 if (!env
->tlb
->helper_tlbinv
) {
8362 gen_helper_tlbinv(cpu_env
);
8363 } /* treat as nop if TLBINV not supported */
8368 if (!env
->tlb
->helper_tlbinvf
) {
8371 gen_helper_tlbinvf(cpu_env
);
8372 } /* treat as nop if TLBINV not supported */
8376 if (!env
->tlb
->helper_tlbwr
)
8378 gen_helper_tlbwr(cpu_env
);
8382 if (!env
->tlb
->helper_tlbp
)
8384 gen_helper_tlbp(cpu_env
);
8388 if (!env
->tlb
->helper_tlbr
)
8390 gen_helper_tlbr(cpu_env
);
/* ERET/ERETNC: R6 forbids it in a delay slot; the no-clear variant is
 * selected by an opcode bit (position differs in MIPS16/microMIPS). */
8392 case OPC_ERET
: /* OPC_ERETNC */
8393 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8394 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8397 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8398 if (ctx
->opcode
& (1 << bit_shift
)) {
8401 check_insn(ctx
, ISA_MIPS32R5
);
8402 gen_helper_eretnc(cpu_env
);
8406 check_insn(ctx
, ISA_MIPS2
);
8407 gen_helper_eret(cpu_env
);
8409 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* DERET: only legal in debug mode (MIPS_HFLAG_DM), else RI. */
8414 check_insn(ctx
, ISA_MIPS32
);
8415 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8416 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8419 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8421 generate_exception_end(ctx
, EXCP_RI
);
8423 gen_helper_deret(cpu_env
);
8424 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* WAIT: save state as if at the next insn so the wakeup resumes there. */
8429 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8430 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8431 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8434 /* If we get an exception, we want to restart at next instruction */
8435 ctx
->base
.pc_next
+= 4;
8436 save_cpu_state(ctx
, 1);
8437 ctx
->base
.pc_next
-= 4;
8438 gen_helper_wait(cpu_env
);
8439 ctx
->base
.is_jmp
= DISAS_NORETURN
;
8444 generate_exception_end(ctx
, EXCP_RI
);
8447 (void)opn
; /* avoid a compiler warning */
8449 #endif /* !CONFIG_USER_ONLY */
8451 /* CP1 Branches (before delay slot) */
/*
 * gen_compute_branch1: pre-R6 CP1 (FPU) conditional branches (bc1f/bc1t/
 * bc1fl/bc1tl and the paired-single bc1any2/bc1any4 forms).  Extracts the
 * condition-code bit(s) from fpu_fcr31, combines them per the branch
 * flavour, leaves the truth value in `bcond`, and sets the branch hflags
 * (BL for likely, BC for conditional) plus btarget/BDS32.
 *
 * NOTE(review): lossy extraction — case labels, braces and breaks of the
 * `switch (op)` are missing and original line numbers are fused into the
 * text.  Only comments were added; restore structure from upstream.
 */
8452 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8453 int32_t cc
, int32_t offset
)
8455 target_ulong btarget
;
8456 TCGv_i32 t0
= tcg_temp_new_i32();
/* R6 removed these encodings; in a delay slot they are also invalid. */
8458 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8459 generate_exception_end(ctx
, EXCP_RI
);
/* cc != 0 needs MIPS IV / MIPS32 (earlier ISAs only have cc 0). */
8464 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8466 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
/* Single-cc forms: shift the cc bit down, invert for the "false"
 * flavours, mask to 1 and widen into bcond. */
8470 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8471 tcg_gen_not_i32(t0
, t0
);
8472 tcg_gen_andi_i32(t0
, t0
, 1);
8473 tcg_gen_extu_i32_tl(bcond
, t0
);
8476 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8477 tcg_gen_not_i32(t0
, t0
);
8478 tcg_gen_andi_i32(t0
, t0
, 1);
8479 tcg_gen_extu_i32_tl(bcond
, t0
);
8482 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8483 tcg_gen_andi_i32(t0
, t0
, 1);
8484 tcg_gen_extu_i32_tl(bcond
, t0
);
8487 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8488 tcg_gen_andi_i32(t0
, t0
, 1);
8489 tcg_gen_extu_i32_tl(bcond
, t0
);
/* Branch-likely variant: annul the delay slot when not taken. */
8491 ctx
->hflags
|= MIPS_HFLAG_BL
;
/* bc1any2 "false": NAND of cc and cc+1 — taken if either bit clear. */
8495 TCGv_i32 t1
= tcg_temp_new_i32();
8496 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8497 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8498 tcg_gen_nand_i32(t0
, t0
, t1
);
8499 tcg_temp_free_i32(t1
);
8500 tcg_gen_andi_i32(t0
, t0
, 1);
8501 tcg_gen_extu_i32_tl(bcond
, t0
);
/* bc1any2 "true": OR of cc and cc+1 — taken if either bit set. */
8506 TCGv_i32 t1
= tcg_temp_new_i32();
8507 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8508 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8509 tcg_gen_or_i32(t0
, t0
, t1
);
8510 tcg_temp_free_i32(t1
);
8511 tcg_gen_andi_i32(t0
, t0
, 1);
8512 tcg_gen_extu_i32_tl(bcond
, t0
);
/* bc1any4 "false": NAND over cc..cc+3 — taken if any bit clear. */
8517 TCGv_i32 t1
= tcg_temp_new_i32();
8518 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8519 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8520 tcg_gen_and_i32(t0
, t0
, t1
);
8521 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8522 tcg_gen_and_i32(t0
, t0
, t1
);
8523 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8524 tcg_gen_nand_i32(t0
, t0
, t1
);
8525 tcg_temp_free_i32(t1
);
8526 tcg_gen_andi_i32(t0
, t0
, 1);
8527 tcg_gen_extu_i32_tl(bcond
, t0
);
/* bc1any4 "true": OR over cc..cc+3 — taken if any bit set. */
8532 TCGv_i32 t1
= tcg_temp_new_i32();
8533 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8534 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8535 tcg_gen_or_i32(t0
, t0
, t1
);
8536 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8537 tcg_gen_or_i32(t0
, t0
, t1
);
8538 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8539 tcg_gen_or_i32(t0
, t0
, t1
);
8540 tcg_temp_free_i32(t1
);
8541 tcg_gen_andi_i32(t0
, t0
, 1);
8542 tcg_gen_extu_i32_tl(bcond
, t0
);
8545 ctx
->hflags
|= MIPS_HFLAG_BC
;
8548 MIPS_INVAL("cp1 cond branch");
8549 generate_exception_end(ctx
, EXCP_RI
);
/* Record branch target; these forms always have a 32-bit delay slot. */
8552 ctx
->btarget
= btarget
;
8553 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8555 tcg_temp_free_i32(t0
);
8558 /* R6 CP1 Branches */
/*
 * gen_compute_branch1_r6: R6 CP1 branches (bc1eqz/bc1nez).  Tests bit 0
 * of FPR ft: bc1eqz inverts it (xori), bc1nez uses it directly; the truth
 * value lands in `bcond` and btarget/hflags are set for the delay slot.
 *
 * NOTE(review): lossy extraction — the final parameter of the signature
 * (referenced below as `delayslot_size`, presumably `int delayslot_size)`)
 * is missing from this listing, as are case labels/braces and the
 * `goto out`/label lines.  Only comments were added; restore upstream.
 */
8559 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8560 int32_t ft
, int32_t offset
,
8563 target_ulong btarget
;
8564 TCGv_i64 t0
= tcg_temp_new_i64();
/* R6 forbids a branch in a delay / forbidden slot: log and raise RI. */
8566 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8567 #ifdef MIPS_DEBUG_DISAS
8568 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8569 "\n", ctx
->base
.pc_next
);
8571 generate_exception_end(ctx
, EXCP_RI
);
/* Load the full 64-bit FPR and isolate bit 0 as the condition. */
8575 gen_load_fpr64(ctx
, t0
, ft
);
8576 tcg_gen_andi_i64(t0
, t0
, 1);
8578 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* bc1eqz: branch when bit 0 is zero, so invert the tested bit. */
8582 tcg_gen_xori_i64(t0
, t0
, 1);
8583 ctx
->hflags
|= MIPS_HFLAG_BC
;
8586 /* t0 already set */
8587 ctx
->hflags
|= MIPS_HFLAG_BC
;
8590 MIPS_INVAL("cp1 cond branch");
8591 generate_exception_end(ctx
, EXCP_RI
);
8595 tcg_gen_trunc_i64_tl(bcond
, t0
);
8597 ctx
->btarget
= btarget
;
/* Delay slot width depends on caller (16-bit compact vs 32-bit). */
8599 switch (delayslot_size
) {
8601 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8604 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8609 tcg_temp_free_i64(t0
);
8612 /* Coprocessor 1 (FPU) */
8614 #define FOP(func, fmt) (((fmt) << 21) | (func))
8617 OPC_ADD_S
= FOP(0, FMT_S
),
8618 OPC_SUB_S
= FOP(1, FMT_S
),
8619 OPC_MUL_S
= FOP(2, FMT_S
),
8620 OPC_DIV_S
= FOP(3, FMT_S
),
8621 OPC_SQRT_S
= FOP(4, FMT_S
),
8622 OPC_ABS_S
= FOP(5, FMT_S
),
8623 OPC_MOV_S
= FOP(6, FMT_S
),
8624 OPC_NEG_S
= FOP(7, FMT_S
),
8625 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8626 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8627 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8628 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8629 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8630 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8631 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8632 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8633 OPC_SEL_S
= FOP(16, FMT_S
),
8634 OPC_MOVCF_S
= FOP(17, FMT_S
),
8635 OPC_MOVZ_S
= FOP(18, FMT_S
),
8636 OPC_MOVN_S
= FOP(19, FMT_S
),
8637 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8638 OPC_RECIP_S
= FOP(21, FMT_S
),
8639 OPC_RSQRT_S
= FOP(22, FMT_S
),
8640 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8641 OPC_MADDF_S
= FOP(24, FMT_S
),
8642 OPC_MSUBF_S
= FOP(25, FMT_S
),
8643 OPC_RINT_S
= FOP(26, FMT_S
),
8644 OPC_CLASS_S
= FOP(27, FMT_S
),
8645 OPC_MIN_S
= FOP(28, FMT_S
),
8646 OPC_RECIP2_S
= FOP(28, FMT_S
),
8647 OPC_MINA_S
= FOP(29, FMT_S
),
8648 OPC_RECIP1_S
= FOP(29, FMT_S
),
8649 OPC_MAX_S
= FOP(30, FMT_S
),
8650 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8651 OPC_MAXA_S
= FOP(31, FMT_S
),
8652 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8653 OPC_CVT_D_S
= FOP(33, FMT_S
),
8654 OPC_CVT_W_S
= FOP(36, FMT_S
),
8655 OPC_CVT_L_S
= FOP(37, FMT_S
),
8656 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8657 OPC_CMP_F_S
= FOP (48, FMT_S
),
8658 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8659 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8660 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8661 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8662 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8663 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8664 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8665 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8666 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8667 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8668 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8669 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8670 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8671 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8672 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8674 OPC_ADD_D
= FOP(0, FMT_D
),
8675 OPC_SUB_D
= FOP(1, FMT_D
),
8676 OPC_MUL_D
= FOP(2, FMT_D
),
8677 OPC_DIV_D
= FOP(3, FMT_D
),
8678 OPC_SQRT_D
= FOP(4, FMT_D
),
8679 OPC_ABS_D
= FOP(5, FMT_D
),
8680 OPC_MOV_D
= FOP(6, FMT_D
),
8681 OPC_NEG_D
= FOP(7, FMT_D
),
8682 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8683 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8684 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8685 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8686 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8687 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8688 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8689 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8690 OPC_SEL_D
= FOP(16, FMT_D
),
8691 OPC_MOVCF_D
= FOP(17, FMT_D
),
8692 OPC_MOVZ_D
= FOP(18, FMT_D
),
8693 OPC_MOVN_D
= FOP(19, FMT_D
),
8694 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8695 OPC_RECIP_D
= FOP(21, FMT_D
),
8696 OPC_RSQRT_D
= FOP(22, FMT_D
),
8697 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8698 OPC_MADDF_D
= FOP(24, FMT_D
),
8699 OPC_MSUBF_D
= FOP(25, FMT_D
),
8700 OPC_RINT_D
= FOP(26, FMT_D
),
8701 OPC_CLASS_D
= FOP(27, FMT_D
),
8702 OPC_MIN_D
= FOP(28, FMT_D
),
8703 OPC_RECIP2_D
= FOP(28, FMT_D
),
8704 OPC_MINA_D
= FOP(29, FMT_D
),
8705 OPC_RECIP1_D
= FOP(29, FMT_D
),
8706 OPC_MAX_D
= FOP(30, FMT_D
),
8707 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8708 OPC_MAXA_D
= FOP(31, FMT_D
),
8709 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8710 OPC_CVT_S_D
= FOP(32, FMT_D
),
8711 OPC_CVT_W_D
= FOP(36, FMT_D
),
8712 OPC_CVT_L_D
= FOP(37, FMT_D
),
8713 OPC_CMP_F_D
= FOP (48, FMT_D
),
8714 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8715 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8716 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8717 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8718 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8719 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8720 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8721 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8722 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8723 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8724 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8725 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8726 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8727 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8728 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8730 OPC_CVT_S_W
= FOP(32, FMT_W
),
8731 OPC_CVT_D_W
= FOP(33, FMT_W
),
8732 OPC_CVT_S_L
= FOP(32, FMT_L
),
8733 OPC_CVT_D_L
= FOP(33, FMT_L
),
8734 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8736 OPC_ADD_PS
= FOP(0, FMT_PS
),
8737 OPC_SUB_PS
= FOP(1, FMT_PS
),
8738 OPC_MUL_PS
= FOP(2, FMT_PS
),
8739 OPC_DIV_PS
= FOP(3, FMT_PS
),
8740 OPC_ABS_PS
= FOP(5, FMT_PS
),
8741 OPC_MOV_PS
= FOP(6, FMT_PS
),
8742 OPC_NEG_PS
= FOP(7, FMT_PS
),
8743 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8744 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8745 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8746 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8747 OPC_MULR_PS
= FOP(26, FMT_PS
),
8748 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8749 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8750 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8751 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8753 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8754 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8755 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8756 OPC_PLL_PS
= FOP(44, FMT_PS
),
8757 OPC_PLU_PS
= FOP(45, FMT_PS
),
8758 OPC_PUL_PS
= FOP(46, FMT_PS
),
8759 OPC_PUU_PS
= FOP(47, FMT_PS
),
8760 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8761 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8762 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8763 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8764 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8765 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8766 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8767 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8768 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8769 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8770 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8771 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8772 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8773 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8774 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8775 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8779 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8780 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8781 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8782 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8783 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8784 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8785 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8786 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8787 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8788 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8789 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8790 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8791 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8792 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8793 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8794 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8795 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8796 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8797 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8798 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8799 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8800 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8802 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8803 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8804 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8805 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8806 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8807 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8808 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8809 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8810 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8811 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8812 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8813 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8814 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8815 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8816 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8817 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8818 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8819 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8820 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8821 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8822 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8823 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8825 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8827 TCGv t0
= tcg_temp_new();
8832 TCGv_i32 fp0
= tcg_temp_new_i32();
8834 gen_load_fpr32(ctx
, fp0
, fs
);
8835 tcg_gen_ext_i32_tl(t0
, fp0
);
8836 tcg_temp_free_i32(fp0
);
8838 gen_store_gpr(t0
, rt
);
8841 gen_load_gpr(t0
, rt
);
8843 TCGv_i32 fp0
= tcg_temp_new_i32();
8845 tcg_gen_trunc_tl_i32(fp0
, t0
);
8846 gen_store_fpr32(ctx
, fp0
, fs
);
8847 tcg_temp_free_i32(fp0
);
8851 gen_helper_1e0i(cfc1
, t0
, fs
);
8852 gen_store_gpr(t0
, rt
);
8855 gen_load_gpr(t0
, rt
);
8856 save_cpu_state(ctx
, 0);
8858 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8860 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8861 tcg_temp_free_i32(fs_tmp
);
8863 /* Stop translation as we may have changed hflags */
8864 ctx
->base
.is_jmp
= DISAS_STOP
;
8866 #if defined(TARGET_MIPS64)
8868 gen_load_fpr64(ctx
, t0
, fs
);
8869 gen_store_gpr(t0
, rt
);
8872 gen_load_gpr(t0
, rt
);
8873 gen_store_fpr64(ctx
, t0
, fs
);
8878 TCGv_i32 fp0
= tcg_temp_new_i32();
8880 gen_load_fpr32h(ctx
, fp0
, fs
);
8881 tcg_gen_ext_i32_tl(t0
, fp0
);
8882 tcg_temp_free_i32(fp0
);
8884 gen_store_gpr(t0
, rt
);
8887 gen_load_gpr(t0
, rt
);
8889 TCGv_i32 fp0
= tcg_temp_new_i32();
8891 tcg_gen_trunc_tl_i32(fp0
, t0
);
8892 gen_store_fpr32h(ctx
, fp0
, fs
);
8893 tcg_temp_free_i32(fp0
);
8897 MIPS_INVAL("cp1 move");
8898 generate_exception_end(ctx
, EXCP_RI
);
8906 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8922 l1
= gen_new_label();
8923 t0
= tcg_temp_new_i32();
8924 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8925 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8926 tcg_temp_free_i32(t0
);
8928 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8930 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8935 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8939 TCGv_i32 t0
= tcg_temp_new_i32();
8940 TCGLabel
*l1
= gen_new_label();
8947 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8948 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8949 gen_load_fpr32(ctx
, t0
, fs
);
8950 gen_store_fpr32(ctx
, t0
, fd
);
8952 tcg_temp_free_i32(t0
);
8955 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8958 TCGv_i32 t0
= tcg_temp_new_i32();
8960 TCGLabel
*l1
= gen_new_label();
8967 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8968 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8969 tcg_temp_free_i32(t0
);
8970 fp0
= tcg_temp_new_i64();
8971 gen_load_fpr64(ctx
, fp0
, fs
);
8972 gen_store_fpr64(ctx
, fp0
, fd
);
8973 tcg_temp_free_i64(fp0
);
8977 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8981 TCGv_i32 t0
= tcg_temp_new_i32();
8982 TCGLabel
*l1
= gen_new_label();
8983 TCGLabel
*l2
= gen_new_label();
8990 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8991 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8992 gen_load_fpr32(ctx
, t0
, fs
);
8993 gen_store_fpr32(ctx
, t0
, fd
);
8996 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8997 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8998 gen_load_fpr32h(ctx
, t0
, fs
);
8999 gen_store_fpr32h(ctx
, t0
, fd
);
9000 tcg_temp_free_i32(t0
);
9004 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9007 TCGv_i32 t1
= tcg_const_i32(0);
9008 TCGv_i32 fp0
= tcg_temp_new_i32();
9009 TCGv_i32 fp1
= tcg_temp_new_i32();
9010 TCGv_i32 fp2
= tcg_temp_new_i32();
9011 gen_load_fpr32(ctx
, fp0
, fd
);
9012 gen_load_fpr32(ctx
, fp1
, ft
);
9013 gen_load_fpr32(ctx
, fp2
, fs
);
9017 tcg_gen_andi_i32(fp0
, fp0
, 1);
9018 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9021 tcg_gen_andi_i32(fp1
, fp1
, 1);
9022 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9025 tcg_gen_andi_i32(fp1
, fp1
, 1);
9026 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9029 MIPS_INVAL("gen_sel_s");
9030 generate_exception_end(ctx
, EXCP_RI
);
9034 gen_store_fpr32(ctx
, fp0
, fd
);
9035 tcg_temp_free_i32(fp2
);
9036 tcg_temp_free_i32(fp1
);
9037 tcg_temp_free_i32(fp0
);
9038 tcg_temp_free_i32(t1
);
9041 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9044 TCGv_i64 t1
= tcg_const_i64(0);
9045 TCGv_i64 fp0
= tcg_temp_new_i64();
9046 TCGv_i64 fp1
= tcg_temp_new_i64();
9047 TCGv_i64 fp2
= tcg_temp_new_i64();
9048 gen_load_fpr64(ctx
, fp0
, fd
);
9049 gen_load_fpr64(ctx
, fp1
, ft
);
9050 gen_load_fpr64(ctx
, fp2
, fs
);
9054 tcg_gen_andi_i64(fp0
, fp0
, 1);
9055 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9058 tcg_gen_andi_i64(fp1
, fp1
, 1);
9059 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9062 tcg_gen_andi_i64(fp1
, fp1
, 1);
9063 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9066 MIPS_INVAL("gen_sel_d");
9067 generate_exception_end(ctx
, EXCP_RI
);
9071 gen_store_fpr64(ctx
, fp0
, fd
);
9072 tcg_temp_free_i64(fp2
);
9073 tcg_temp_free_i64(fp1
);
9074 tcg_temp_free_i64(fp0
);
9075 tcg_temp_free_i64(t1
);
9078 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
9079 int ft
, int fs
, int fd
, int cc
)
9081 uint32_t func
= ctx
->opcode
& 0x3f;
9085 TCGv_i32 fp0
= tcg_temp_new_i32();
9086 TCGv_i32 fp1
= tcg_temp_new_i32();
9088 gen_load_fpr32(ctx
, fp0
, fs
);
9089 gen_load_fpr32(ctx
, fp1
, ft
);
9090 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
9091 tcg_temp_free_i32(fp1
);
9092 gen_store_fpr32(ctx
, fp0
, fd
);
9093 tcg_temp_free_i32(fp0
);
9098 TCGv_i32 fp0
= tcg_temp_new_i32();
9099 TCGv_i32 fp1
= tcg_temp_new_i32();
9101 gen_load_fpr32(ctx
, fp0
, fs
);
9102 gen_load_fpr32(ctx
, fp1
, ft
);
9103 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9104 tcg_temp_free_i32(fp1
);
9105 gen_store_fpr32(ctx
, fp0
, fd
);
9106 tcg_temp_free_i32(fp0
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9112 TCGv_i32 fp1
= tcg_temp_new_i32();
9114 gen_load_fpr32(ctx
, fp0
, fs
);
9115 gen_load_fpr32(ctx
, fp1
, ft
);
9116 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9117 tcg_temp_free_i32(fp1
);
9118 gen_store_fpr32(ctx
, fp0
, fd
);
9119 tcg_temp_free_i32(fp0
);
9124 TCGv_i32 fp0
= tcg_temp_new_i32();
9125 TCGv_i32 fp1
= tcg_temp_new_i32();
9127 gen_load_fpr32(ctx
, fp0
, fs
);
9128 gen_load_fpr32(ctx
, fp1
, ft
);
9129 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9130 tcg_temp_free_i32(fp1
);
9131 gen_store_fpr32(ctx
, fp0
, fd
);
9132 tcg_temp_free_i32(fp0
);
9137 TCGv_i32 fp0
= tcg_temp_new_i32();
9139 gen_load_fpr32(ctx
, fp0
, fs
);
9140 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9141 gen_store_fpr32(ctx
, fp0
, fd
);
9142 tcg_temp_free_i32(fp0
);
9147 TCGv_i32 fp0
= tcg_temp_new_i32();
9149 gen_load_fpr32(ctx
, fp0
, fs
);
9151 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9153 gen_helper_float_abs_s(fp0
, fp0
);
9155 gen_store_fpr32(ctx
, fp0
, fd
);
9156 tcg_temp_free_i32(fp0
);
9161 TCGv_i32 fp0
= tcg_temp_new_i32();
9163 gen_load_fpr32(ctx
, fp0
, fs
);
9164 gen_store_fpr32(ctx
, fp0
, fd
);
9165 tcg_temp_free_i32(fp0
);
9170 TCGv_i32 fp0
= tcg_temp_new_i32();
9172 gen_load_fpr32(ctx
, fp0
, fs
);
9174 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9176 gen_helper_float_chs_s(fp0
, fp0
);
9178 gen_store_fpr32(ctx
, fp0
, fd
);
9179 tcg_temp_free_i32(fp0
);
9183 check_cp1_64bitmode(ctx
);
9185 TCGv_i32 fp32
= tcg_temp_new_i32();
9186 TCGv_i64 fp64
= tcg_temp_new_i64();
9188 gen_load_fpr32(ctx
, fp32
, fs
);
9190 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9192 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9194 tcg_temp_free_i32(fp32
);
9195 gen_store_fpr64(ctx
, fp64
, fd
);
9196 tcg_temp_free_i64(fp64
);
9200 check_cp1_64bitmode(ctx
);
9202 TCGv_i32 fp32
= tcg_temp_new_i32();
9203 TCGv_i64 fp64
= tcg_temp_new_i64();
9205 gen_load_fpr32(ctx
, fp32
, fs
);
9207 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9209 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9211 tcg_temp_free_i32(fp32
);
9212 gen_store_fpr64(ctx
, fp64
, fd
);
9213 tcg_temp_free_i64(fp64
);
9217 check_cp1_64bitmode(ctx
);
9219 TCGv_i32 fp32
= tcg_temp_new_i32();
9220 TCGv_i64 fp64
= tcg_temp_new_i64();
9222 gen_load_fpr32(ctx
, fp32
, fs
);
9224 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9226 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9228 tcg_temp_free_i32(fp32
);
9229 gen_store_fpr64(ctx
, fp64
, fd
);
9230 tcg_temp_free_i64(fp64
);
9234 check_cp1_64bitmode(ctx
);
9236 TCGv_i32 fp32
= tcg_temp_new_i32();
9237 TCGv_i64 fp64
= tcg_temp_new_i64();
9239 gen_load_fpr32(ctx
, fp32
, fs
);
9241 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9243 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9245 tcg_temp_free_i32(fp32
);
9246 gen_store_fpr64(ctx
, fp64
, fd
);
9247 tcg_temp_free_i64(fp64
);
9252 TCGv_i32 fp0
= tcg_temp_new_i32();
9254 gen_load_fpr32(ctx
, fp0
, fs
);
9256 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9258 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9260 gen_store_fpr32(ctx
, fp0
, fd
);
9261 tcg_temp_free_i32(fp0
);
9266 TCGv_i32 fp0
= tcg_temp_new_i32();
9268 gen_load_fpr32(ctx
, fp0
, fs
);
9270 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9272 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9274 gen_store_fpr32(ctx
, fp0
, fd
);
9275 tcg_temp_free_i32(fp0
);
9280 TCGv_i32 fp0
= tcg_temp_new_i32();
9282 gen_load_fpr32(ctx
, fp0
, fs
);
9284 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9286 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9288 gen_store_fpr32(ctx
, fp0
, fd
);
9289 tcg_temp_free_i32(fp0
);
9294 TCGv_i32 fp0
= tcg_temp_new_i32();
9296 gen_load_fpr32(ctx
, fp0
, fs
);
9298 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9300 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9302 gen_store_fpr32(ctx
, fp0
, fd
);
9303 tcg_temp_free_i32(fp0
);
9307 check_insn(ctx
, ISA_MIPS32R6
);
9308 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9311 check_insn(ctx
, ISA_MIPS32R6
);
9312 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9315 check_insn(ctx
, ISA_MIPS32R6
);
9316 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9319 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9320 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9323 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9325 TCGLabel
*l1
= gen_new_label();
9329 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9331 fp0
= tcg_temp_new_i32();
9332 gen_load_fpr32(ctx
, fp0
, fs
);
9333 gen_store_fpr32(ctx
, fp0
, fd
);
9334 tcg_temp_free_i32(fp0
);
9339 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9341 TCGLabel
*l1
= gen_new_label();
9345 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9346 fp0
= tcg_temp_new_i32();
9347 gen_load_fpr32(ctx
, fp0
, fs
);
9348 gen_store_fpr32(ctx
, fp0
, fd
);
9349 tcg_temp_free_i32(fp0
);
9356 TCGv_i32 fp0
= tcg_temp_new_i32();
9358 gen_load_fpr32(ctx
, fp0
, fs
);
9359 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9360 gen_store_fpr32(ctx
, fp0
, fd
);
9361 tcg_temp_free_i32(fp0
);
9366 TCGv_i32 fp0
= tcg_temp_new_i32();
9368 gen_load_fpr32(ctx
, fp0
, fs
);
9369 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9370 gen_store_fpr32(ctx
, fp0
, fd
);
9371 tcg_temp_free_i32(fp0
);
9375 check_insn(ctx
, ISA_MIPS32R6
);
9377 TCGv_i32 fp0
= tcg_temp_new_i32();
9378 TCGv_i32 fp1
= tcg_temp_new_i32();
9379 TCGv_i32 fp2
= tcg_temp_new_i32();
9380 gen_load_fpr32(ctx
, fp0
, fs
);
9381 gen_load_fpr32(ctx
, fp1
, ft
);
9382 gen_load_fpr32(ctx
, fp2
, fd
);
9383 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9384 gen_store_fpr32(ctx
, fp2
, fd
);
9385 tcg_temp_free_i32(fp2
);
9386 tcg_temp_free_i32(fp1
);
9387 tcg_temp_free_i32(fp0
);
9391 check_insn(ctx
, ISA_MIPS32R6
);
9393 TCGv_i32 fp0
= tcg_temp_new_i32();
9394 TCGv_i32 fp1
= tcg_temp_new_i32();
9395 TCGv_i32 fp2
= tcg_temp_new_i32();
9396 gen_load_fpr32(ctx
, fp0
, fs
);
9397 gen_load_fpr32(ctx
, fp1
, ft
);
9398 gen_load_fpr32(ctx
, fp2
, fd
);
9399 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9400 gen_store_fpr32(ctx
, fp2
, fd
);
9401 tcg_temp_free_i32(fp2
);
9402 tcg_temp_free_i32(fp1
);
9403 tcg_temp_free_i32(fp0
);
9407 check_insn(ctx
, ISA_MIPS32R6
);
9409 TCGv_i32 fp0
= tcg_temp_new_i32();
9410 gen_load_fpr32(ctx
, fp0
, fs
);
9411 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9412 gen_store_fpr32(ctx
, fp0
, fd
);
9413 tcg_temp_free_i32(fp0
);
9417 check_insn(ctx
, ISA_MIPS32R6
);
9419 TCGv_i32 fp0
= tcg_temp_new_i32();
9420 gen_load_fpr32(ctx
, fp0
, fs
);
9421 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9422 gen_store_fpr32(ctx
, fp0
, fd
);
9423 tcg_temp_free_i32(fp0
);
9426 case OPC_MIN_S
: /* OPC_RECIP2_S */
9427 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9429 TCGv_i32 fp0
= tcg_temp_new_i32();
9430 TCGv_i32 fp1
= tcg_temp_new_i32();
9431 TCGv_i32 fp2
= tcg_temp_new_i32();
9432 gen_load_fpr32(ctx
, fp0
, fs
);
9433 gen_load_fpr32(ctx
, fp1
, ft
);
9434 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9435 gen_store_fpr32(ctx
, fp2
, fd
);
9436 tcg_temp_free_i32(fp2
);
9437 tcg_temp_free_i32(fp1
);
9438 tcg_temp_free_i32(fp0
);
9441 check_cp1_64bitmode(ctx
);
9443 TCGv_i32 fp0
= tcg_temp_new_i32();
9444 TCGv_i32 fp1
= tcg_temp_new_i32();
9446 gen_load_fpr32(ctx
, fp0
, fs
);
9447 gen_load_fpr32(ctx
, fp1
, ft
);
9448 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9449 tcg_temp_free_i32(fp1
);
9450 gen_store_fpr32(ctx
, fp0
, fd
);
9451 tcg_temp_free_i32(fp0
);
9455 case OPC_MINA_S
: /* OPC_RECIP1_S */
9456 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9458 TCGv_i32 fp0
= tcg_temp_new_i32();
9459 TCGv_i32 fp1
= tcg_temp_new_i32();
9460 TCGv_i32 fp2
= tcg_temp_new_i32();
9461 gen_load_fpr32(ctx
, fp0
, fs
);
9462 gen_load_fpr32(ctx
, fp1
, ft
);
9463 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9464 gen_store_fpr32(ctx
, fp2
, fd
);
9465 tcg_temp_free_i32(fp2
);
9466 tcg_temp_free_i32(fp1
);
9467 tcg_temp_free_i32(fp0
);
9470 check_cp1_64bitmode(ctx
);
9472 TCGv_i32 fp0
= tcg_temp_new_i32();
9474 gen_load_fpr32(ctx
, fp0
, fs
);
9475 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9476 gen_store_fpr32(ctx
, fp0
, fd
);
9477 tcg_temp_free_i32(fp0
);
9481 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9482 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9484 TCGv_i32 fp0
= tcg_temp_new_i32();
9485 TCGv_i32 fp1
= tcg_temp_new_i32();
9486 gen_load_fpr32(ctx
, fp0
, fs
);
9487 gen_load_fpr32(ctx
, fp1
, ft
);
9488 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9489 gen_store_fpr32(ctx
, fp1
, fd
);
9490 tcg_temp_free_i32(fp1
);
9491 tcg_temp_free_i32(fp0
);
9494 check_cp1_64bitmode(ctx
);
9496 TCGv_i32 fp0
= tcg_temp_new_i32();
9498 gen_load_fpr32(ctx
, fp0
, fs
);
9499 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9500 gen_store_fpr32(ctx
, fp0
, fd
);
9501 tcg_temp_free_i32(fp0
);
9505 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9506 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9508 TCGv_i32 fp0
= tcg_temp_new_i32();
9509 TCGv_i32 fp1
= tcg_temp_new_i32();
9510 gen_load_fpr32(ctx
, fp0
, fs
);
9511 gen_load_fpr32(ctx
, fp1
, ft
);
9512 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9513 gen_store_fpr32(ctx
, fp1
, fd
);
9514 tcg_temp_free_i32(fp1
);
9515 tcg_temp_free_i32(fp0
);
9518 check_cp1_64bitmode(ctx
);
9520 TCGv_i32 fp0
= tcg_temp_new_i32();
9521 TCGv_i32 fp1
= tcg_temp_new_i32();
9523 gen_load_fpr32(ctx
, fp0
, fs
);
9524 gen_load_fpr32(ctx
, fp1
, ft
);
9525 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9526 tcg_temp_free_i32(fp1
);
9527 gen_store_fpr32(ctx
, fp0
, fd
);
9528 tcg_temp_free_i32(fp0
);
9533 check_cp1_registers(ctx
, fd
);
9535 TCGv_i32 fp32
= tcg_temp_new_i32();
9536 TCGv_i64 fp64
= tcg_temp_new_i64();
9538 gen_load_fpr32(ctx
, fp32
, fs
);
9539 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9540 tcg_temp_free_i32(fp32
);
9541 gen_store_fpr64(ctx
, fp64
, fd
);
9542 tcg_temp_free_i64(fp64
);
9547 TCGv_i32 fp0
= tcg_temp_new_i32();
9549 gen_load_fpr32(ctx
, fp0
, fs
);
9551 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9553 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9555 gen_store_fpr32(ctx
, fp0
, fd
);
9556 tcg_temp_free_i32(fp0
);
9560 check_cp1_64bitmode(ctx
);
9562 TCGv_i32 fp32
= tcg_temp_new_i32();
9563 TCGv_i64 fp64
= tcg_temp_new_i64();
9565 gen_load_fpr32(ctx
, fp32
, fs
);
9567 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9569 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9571 tcg_temp_free_i32(fp32
);
9572 gen_store_fpr64(ctx
, fp64
, fd
);
9573 tcg_temp_free_i64(fp64
);
9579 TCGv_i64 fp64
= tcg_temp_new_i64();
9580 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9581 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9583 gen_load_fpr32(ctx
, fp32_0
, fs
);
9584 gen_load_fpr32(ctx
, fp32_1
, ft
);
9585 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9586 tcg_temp_free_i32(fp32_1
);
9587 tcg_temp_free_i32(fp32_0
);
9588 gen_store_fpr64(ctx
, fp64
, fd
);
9589 tcg_temp_free_i64(fp64
);
9601 case OPC_CMP_NGLE_S
:
9608 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9609 if (ctx
->opcode
& (1 << 6)) {
9610 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9612 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9616 check_cp1_registers(ctx
, fs
| ft
| fd
);
9618 TCGv_i64 fp0
= tcg_temp_new_i64();
9619 TCGv_i64 fp1
= tcg_temp_new_i64();
9621 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_load_fpr64(ctx
, fp1
, ft
);
9623 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9624 tcg_temp_free_i64(fp1
);
9625 gen_store_fpr64(ctx
, fp0
, fd
);
9626 tcg_temp_free_i64(fp0
);
9630 check_cp1_registers(ctx
, fs
| ft
| fd
);
9632 TCGv_i64 fp0
= tcg_temp_new_i64();
9633 TCGv_i64 fp1
= tcg_temp_new_i64();
9635 gen_load_fpr64(ctx
, fp0
, fs
);
9636 gen_load_fpr64(ctx
, fp1
, ft
);
9637 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9638 tcg_temp_free_i64(fp1
);
9639 gen_store_fpr64(ctx
, fp0
, fd
);
9640 tcg_temp_free_i64(fp0
);
9644 check_cp1_registers(ctx
, fs
| ft
| fd
);
9646 TCGv_i64 fp0
= tcg_temp_new_i64();
9647 TCGv_i64 fp1
= tcg_temp_new_i64();
9649 gen_load_fpr64(ctx
, fp0
, fs
);
9650 gen_load_fpr64(ctx
, fp1
, ft
);
9651 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9652 tcg_temp_free_i64(fp1
);
9653 gen_store_fpr64(ctx
, fp0
, fd
);
9654 tcg_temp_free_i64(fp0
);
9658 check_cp1_registers(ctx
, fs
| ft
| fd
);
9660 TCGv_i64 fp0
= tcg_temp_new_i64();
9661 TCGv_i64 fp1
= tcg_temp_new_i64();
9663 gen_load_fpr64(ctx
, fp0
, fs
);
9664 gen_load_fpr64(ctx
, fp1
, ft
);
9665 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9666 tcg_temp_free_i64(fp1
);
9667 gen_store_fpr64(ctx
, fp0
, fd
);
9668 tcg_temp_free_i64(fp0
);
9672 check_cp1_registers(ctx
, fs
| fd
);
9674 TCGv_i64 fp0
= tcg_temp_new_i64();
9676 gen_load_fpr64(ctx
, fp0
, fs
);
9677 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9678 gen_store_fpr64(ctx
, fp0
, fd
);
9679 tcg_temp_free_i64(fp0
);
9683 check_cp1_registers(ctx
, fs
| fd
);
9685 TCGv_i64 fp0
= tcg_temp_new_i64();
9687 gen_load_fpr64(ctx
, fp0
, fs
);
9689 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9691 gen_helper_float_abs_d(fp0
, fp0
);
9693 gen_store_fpr64(ctx
, fp0
, fd
);
9694 tcg_temp_free_i64(fp0
);
9698 check_cp1_registers(ctx
, fs
| fd
);
9700 TCGv_i64 fp0
= tcg_temp_new_i64();
9702 gen_load_fpr64(ctx
, fp0
, fs
);
9703 gen_store_fpr64(ctx
, fp0
, fd
);
9704 tcg_temp_free_i64(fp0
);
9708 check_cp1_registers(ctx
, fs
| fd
);
9710 TCGv_i64 fp0
= tcg_temp_new_i64();
9712 gen_load_fpr64(ctx
, fp0
, fs
);
9714 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9716 gen_helper_float_chs_d(fp0
, fp0
);
9718 gen_store_fpr64(ctx
, fp0
, fd
);
9719 tcg_temp_free_i64(fp0
);
9723 check_cp1_64bitmode(ctx
);
9725 TCGv_i64 fp0
= tcg_temp_new_i64();
9727 gen_load_fpr64(ctx
, fp0
, fs
);
9729 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9731 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9733 gen_store_fpr64(ctx
, fp0
, fd
);
9734 tcg_temp_free_i64(fp0
);
9738 check_cp1_64bitmode(ctx
);
9740 TCGv_i64 fp0
= tcg_temp_new_i64();
9742 gen_load_fpr64(ctx
, fp0
, fs
);
9744 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9746 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9748 gen_store_fpr64(ctx
, fp0
, fd
);
9749 tcg_temp_free_i64(fp0
);
9753 check_cp1_64bitmode(ctx
);
9755 TCGv_i64 fp0
= tcg_temp_new_i64();
9757 gen_load_fpr64(ctx
, fp0
, fs
);
9759 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9761 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9763 gen_store_fpr64(ctx
, fp0
, fd
);
9764 tcg_temp_free_i64(fp0
);
9768 check_cp1_64bitmode(ctx
);
9770 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 gen_load_fpr64(ctx
, fp0
, fs
);
9774 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9776 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9778 gen_store_fpr64(ctx
, fp0
, fd
);
9779 tcg_temp_free_i64(fp0
);
9783 check_cp1_registers(ctx
, fs
);
9785 TCGv_i32 fp32
= tcg_temp_new_i32();
9786 TCGv_i64 fp64
= tcg_temp_new_i64();
9788 gen_load_fpr64(ctx
, fp64
, fs
);
9790 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9792 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9794 tcg_temp_free_i64(fp64
);
9795 gen_store_fpr32(ctx
, fp32
, fd
);
9796 tcg_temp_free_i32(fp32
);
9800 check_cp1_registers(ctx
, fs
);
9802 TCGv_i32 fp32
= tcg_temp_new_i32();
9803 TCGv_i64 fp64
= tcg_temp_new_i64();
9805 gen_load_fpr64(ctx
, fp64
, fs
);
9807 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9809 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9811 tcg_temp_free_i64(fp64
);
9812 gen_store_fpr32(ctx
, fp32
, fd
);
9813 tcg_temp_free_i32(fp32
);
9817 check_cp1_registers(ctx
, fs
);
9819 TCGv_i32 fp32
= tcg_temp_new_i32();
9820 TCGv_i64 fp64
= tcg_temp_new_i64();
9822 gen_load_fpr64(ctx
, fp64
, fs
);
9824 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9826 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9828 tcg_temp_free_i64(fp64
);
9829 gen_store_fpr32(ctx
, fp32
, fd
);
9830 tcg_temp_free_i32(fp32
);
9834 check_cp1_registers(ctx
, fs
);
9836 TCGv_i32 fp32
= tcg_temp_new_i32();
9837 TCGv_i64 fp64
= tcg_temp_new_i64();
9839 gen_load_fpr64(ctx
, fp64
, fs
);
9841 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9843 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9845 tcg_temp_free_i64(fp64
);
9846 gen_store_fpr32(ctx
, fp32
, fd
);
9847 tcg_temp_free_i32(fp32
);
9851 check_insn(ctx
, ISA_MIPS32R6
);
9852 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9855 check_insn(ctx
, ISA_MIPS32R6
);
9856 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9859 check_insn(ctx
, ISA_MIPS32R6
);
9860 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9864 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9869 TCGLabel
*l1
= gen_new_label();
9873 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9875 fp0
= tcg_temp_new_i64();
9876 gen_load_fpr64(ctx
, fp0
, fs
);
9877 gen_store_fpr64(ctx
, fp0
, fd
);
9878 tcg_temp_free_i64(fp0
);
9883 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9885 TCGLabel
*l1
= gen_new_label();
9889 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9890 fp0
= tcg_temp_new_i64();
9891 gen_load_fpr64(ctx
, fp0
, fs
);
9892 gen_store_fpr64(ctx
, fp0
, fd
);
9893 tcg_temp_free_i64(fp0
);
9899 check_cp1_registers(ctx
, fs
| fd
);
9901 TCGv_i64 fp0
= tcg_temp_new_i64();
9903 gen_load_fpr64(ctx
, fp0
, fs
);
9904 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9905 gen_store_fpr64(ctx
, fp0
, fd
);
9906 tcg_temp_free_i64(fp0
);
9910 check_cp1_registers(ctx
, fs
| fd
);
9912 TCGv_i64 fp0
= tcg_temp_new_i64();
9914 gen_load_fpr64(ctx
, fp0
, fs
);
9915 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9916 gen_store_fpr64(ctx
, fp0
, fd
);
9917 tcg_temp_free_i64(fp0
);
9921 check_insn(ctx
, ISA_MIPS32R6
);
9923 TCGv_i64 fp0
= tcg_temp_new_i64();
9924 TCGv_i64 fp1
= tcg_temp_new_i64();
9925 TCGv_i64 fp2
= tcg_temp_new_i64();
9926 gen_load_fpr64(ctx
, fp0
, fs
);
9927 gen_load_fpr64(ctx
, fp1
, ft
);
9928 gen_load_fpr64(ctx
, fp2
, fd
);
9929 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9930 gen_store_fpr64(ctx
, fp2
, fd
);
9931 tcg_temp_free_i64(fp2
);
9932 tcg_temp_free_i64(fp1
);
9933 tcg_temp_free_i64(fp0
);
9937 check_insn(ctx
, ISA_MIPS32R6
);
9939 TCGv_i64 fp0
= tcg_temp_new_i64();
9940 TCGv_i64 fp1
= tcg_temp_new_i64();
9941 TCGv_i64 fp2
= tcg_temp_new_i64();
9942 gen_load_fpr64(ctx
, fp0
, fs
);
9943 gen_load_fpr64(ctx
, fp1
, ft
);
9944 gen_load_fpr64(ctx
, fp2
, fd
);
9945 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9946 gen_store_fpr64(ctx
, fp2
, fd
);
9947 tcg_temp_free_i64(fp2
);
9948 tcg_temp_free_i64(fp1
);
9949 tcg_temp_free_i64(fp0
);
9953 check_insn(ctx
, ISA_MIPS32R6
);
9955 TCGv_i64 fp0
= tcg_temp_new_i64();
9956 gen_load_fpr64(ctx
, fp0
, fs
);
9957 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9958 gen_store_fpr64(ctx
, fp0
, fd
);
9959 tcg_temp_free_i64(fp0
);
9963 check_insn(ctx
, ISA_MIPS32R6
);
9965 TCGv_i64 fp0
= tcg_temp_new_i64();
9966 gen_load_fpr64(ctx
, fp0
, fs
);
9967 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9968 gen_store_fpr64(ctx
, fp0
, fd
);
9969 tcg_temp_free_i64(fp0
);
9972 case OPC_MIN_D
: /* OPC_RECIP2_D */
9973 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9975 TCGv_i64 fp0
= tcg_temp_new_i64();
9976 TCGv_i64 fp1
= tcg_temp_new_i64();
9977 gen_load_fpr64(ctx
, fp0
, fs
);
9978 gen_load_fpr64(ctx
, fp1
, ft
);
9979 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9980 gen_store_fpr64(ctx
, fp1
, fd
);
9981 tcg_temp_free_i64(fp1
);
9982 tcg_temp_free_i64(fp0
);
9985 check_cp1_64bitmode(ctx
);
9987 TCGv_i64 fp0
= tcg_temp_new_i64();
9988 TCGv_i64 fp1
= tcg_temp_new_i64();
9990 gen_load_fpr64(ctx
, fp0
, fs
);
9991 gen_load_fpr64(ctx
, fp1
, ft
);
9992 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9993 tcg_temp_free_i64(fp1
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
9999 case OPC_MINA_D
: /* OPC_RECIP1_D */
10000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10002 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 TCGv_i64 fp1
= tcg_temp_new_i64();
10004 gen_load_fpr64(ctx
, fp0
, fs
);
10005 gen_load_fpr64(ctx
, fp1
, ft
);
10006 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
10007 gen_store_fpr64(ctx
, fp1
, fd
);
10008 tcg_temp_free_i64(fp1
);
10009 tcg_temp_free_i64(fp0
);
10012 check_cp1_64bitmode(ctx
);
10014 TCGv_i64 fp0
= tcg_temp_new_i64();
10016 gen_load_fpr64(ctx
, fp0
, fs
);
10017 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
10018 gen_store_fpr64(ctx
, fp0
, fd
);
10019 tcg_temp_free_i64(fp0
);
10023 case OPC_MAX_D
: /* OPC_RSQRT1_D */
10024 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10026 TCGv_i64 fp0
= tcg_temp_new_i64();
10027 TCGv_i64 fp1
= tcg_temp_new_i64();
10028 gen_load_fpr64(ctx
, fp0
, fs
);
10029 gen_load_fpr64(ctx
, fp1
, ft
);
10030 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
10031 gen_store_fpr64(ctx
, fp1
, fd
);
10032 tcg_temp_free_i64(fp1
);
10033 tcg_temp_free_i64(fp0
);
10036 check_cp1_64bitmode(ctx
);
10038 TCGv_i64 fp0
= tcg_temp_new_i64();
10040 gen_load_fpr64(ctx
, fp0
, fs
);
10041 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
10042 gen_store_fpr64(ctx
, fp0
, fd
);
10043 tcg_temp_free_i64(fp0
);
10047 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
10048 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
10050 TCGv_i64 fp0
= tcg_temp_new_i64();
10051 TCGv_i64 fp1
= tcg_temp_new_i64();
10052 gen_load_fpr64(ctx
, fp0
, fs
);
10053 gen_load_fpr64(ctx
, fp1
, ft
);
10054 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
10055 gen_store_fpr64(ctx
, fp1
, fd
);
10056 tcg_temp_free_i64(fp1
);
10057 tcg_temp_free_i64(fp0
);
10060 check_cp1_64bitmode(ctx
);
10062 TCGv_i64 fp0
= tcg_temp_new_i64();
10063 TCGv_i64 fp1
= tcg_temp_new_i64();
10065 gen_load_fpr64(ctx
, fp0
, fs
);
10066 gen_load_fpr64(ctx
, fp1
, ft
);
10067 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
10068 tcg_temp_free_i64(fp1
);
10069 gen_store_fpr64(ctx
, fp0
, fd
);
10070 tcg_temp_free_i64(fp0
);
10077 case OPC_CMP_UEQ_D
:
10078 case OPC_CMP_OLT_D
:
10079 case OPC_CMP_ULT_D
:
10080 case OPC_CMP_OLE_D
:
10081 case OPC_CMP_ULE_D
:
10083 case OPC_CMP_NGLE_D
:
10084 case OPC_CMP_SEQ_D
:
10085 case OPC_CMP_NGL_D
:
10087 case OPC_CMP_NGE_D
:
10089 case OPC_CMP_NGT_D
:
10090 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10091 if (ctx
->opcode
& (1 << 6)) {
10092 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
10094 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
10098 check_cp1_registers(ctx
, fs
);
10100 TCGv_i32 fp32
= tcg_temp_new_i32();
10101 TCGv_i64 fp64
= tcg_temp_new_i64();
10103 gen_load_fpr64(ctx
, fp64
, fs
);
10104 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10105 tcg_temp_free_i64(fp64
);
10106 gen_store_fpr32(ctx
, fp32
, fd
);
10107 tcg_temp_free_i32(fp32
);
10111 check_cp1_registers(ctx
, fs
);
10113 TCGv_i32 fp32
= tcg_temp_new_i32();
10114 TCGv_i64 fp64
= tcg_temp_new_i64();
10116 gen_load_fpr64(ctx
, fp64
, fs
);
10117 if (ctx
->nan2008
) {
10118 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10120 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10122 tcg_temp_free_i64(fp64
);
10123 gen_store_fpr32(ctx
, fp32
, fd
);
10124 tcg_temp_free_i32(fp32
);
10128 check_cp1_64bitmode(ctx
);
10130 TCGv_i64 fp0
= tcg_temp_new_i64();
10132 gen_load_fpr64(ctx
, fp0
, fs
);
10133 if (ctx
->nan2008
) {
10134 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10136 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10138 gen_store_fpr64(ctx
, fp0
, fd
);
10139 tcg_temp_free_i64(fp0
);
10144 TCGv_i32 fp0
= tcg_temp_new_i32();
10146 gen_load_fpr32(ctx
, fp0
, fs
);
10147 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10148 gen_store_fpr32(ctx
, fp0
, fd
);
10149 tcg_temp_free_i32(fp0
);
10153 check_cp1_registers(ctx
, fd
);
10155 TCGv_i32 fp32
= tcg_temp_new_i32();
10156 TCGv_i64 fp64
= tcg_temp_new_i64();
10158 gen_load_fpr32(ctx
, fp32
, fs
);
10159 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10160 tcg_temp_free_i32(fp32
);
10161 gen_store_fpr64(ctx
, fp64
, fd
);
10162 tcg_temp_free_i64(fp64
);
10166 check_cp1_64bitmode(ctx
);
10168 TCGv_i32 fp32
= tcg_temp_new_i32();
10169 TCGv_i64 fp64
= tcg_temp_new_i64();
10171 gen_load_fpr64(ctx
, fp64
, fs
);
10172 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10173 tcg_temp_free_i64(fp64
);
10174 gen_store_fpr32(ctx
, fp32
, fd
);
10175 tcg_temp_free_i32(fp32
);
10179 check_cp1_64bitmode(ctx
);
10181 TCGv_i64 fp0
= tcg_temp_new_i64();
10183 gen_load_fpr64(ctx
, fp0
, fs
);
10184 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10185 gen_store_fpr64(ctx
, fp0
, fd
);
10186 tcg_temp_free_i64(fp0
);
10189 case OPC_CVT_PS_PW
:
10192 TCGv_i64 fp0
= tcg_temp_new_i64();
10194 gen_load_fpr64(ctx
, fp0
, fs
);
10195 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10196 gen_store_fpr64(ctx
, fp0
, fd
);
10197 tcg_temp_free_i64(fp0
);
10203 TCGv_i64 fp0
= tcg_temp_new_i64();
10204 TCGv_i64 fp1
= tcg_temp_new_i64();
10206 gen_load_fpr64(ctx
, fp0
, fs
);
10207 gen_load_fpr64(ctx
, fp1
, ft
);
10208 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10209 tcg_temp_free_i64(fp1
);
10210 gen_store_fpr64(ctx
, fp0
, fd
);
10211 tcg_temp_free_i64(fp0
);
10217 TCGv_i64 fp0
= tcg_temp_new_i64();
10218 TCGv_i64 fp1
= tcg_temp_new_i64();
10220 gen_load_fpr64(ctx
, fp0
, fs
);
10221 gen_load_fpr64(ctx
, fp1
, ft
);
10222 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10223 tcg_temp_free_i64(fp1
);
10224 gen_store_fpr64(ctx
, fp0
, fd
);
10225 tcg_temp_free_i64(fp0
);
10231 TCGv_i64 fp0
= tcg_temp_new_i64();
10232 TCGv_i64 fp1
= tcg_temp_new_i64();
10234 gen_load_fpr64(ctx
, fp0
, fs
);
10235 gen_load_fpr64(ctx
, fp1
, ft
);
10236 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10237 tcg_temp_free_i64(fp1
);
10238 gen_store_fpr64(ctx
, fp0
, fd
);
10239 tcg_temp_free_i64(fp0
);
10245 TCGv_i64 fp0
= tcg_temp_new_i64();
10247 gen_load_fpr64(ctx
, fp0
, fs
);
10248 gen_helper_float_abs_ps(fp0
, fp0
);
10249 gen_store_fpr64(ctx
, fp0
, fd
);
10250 tcg_temp_free_i64(fp0
);
10256 TCGv_i64 fp0
= tcg_temp_new_i64();
10258 gen_load_fpr64(ctx
, fp0
, fs
);
10259 gen_store_fpr64(ctx
, fp0
, fd
);
10260 tcg_temp_free_i64(fp0
);
10266 TCGv_i64 fp0
= tcg_temp_new_i64();
10268 gen_load_fpr64(ctx
, fp0
, fs
);
10269 gen_helper_float_chs_ps(fp0
, fp0
);
10270 gen_store_fpr64(ctx
, fp0
, fd
);
10271 tcg_temp_free_i64(fp0
);
10276 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10281 TCGLabel
*l1
= gen_new_label();
10285 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10286 fp0
= tcg_temp_new_i64();
10287 gen_load_fpr64(ctx
, fp0
, fs
);
10288 gen_store_fpr64(ctx
, fp0
, fd
);
10289 tcg_temp_free_i64(fp0
);
10296 TCGLabel
*l1
= gen_new_label();
10300 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10301 fp0
= tcg_temp_new_i64();
10302 gen_load_fpr64(ctx
, fp0
, fs
);
10303 gen_store_fpr64(ctx
, fp0
, fd
);
10304 tcg_temp_free_i64(fp0
);
10312 TCGv_i64 fp0
= tcg_temp_new_i64();
10313 TCGv_i64 fp1
= tcg_temp_new_i64();
10315 gen_load_fpr64(ctx
, fp0
, ft
);
10316 gen_load_fpr64(ctx
, fp1
, fs
);
10317 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10318 tcg_temp_free_i64(fp1
);
10319 gen_store_fpr64(ctx
, fp0
, fd
);
10320 tcg_temp_free_i64(fp0
);
10326 TCGv_i64 fp0
= tcg_temp_new_i64();
10327 TCGv_i64 fp1
= tcg_temp_new_i64();
10329 gen_load_fpr64(ctx
, fp0
, ft
);
10330 gen_load_fpr64(ctx
, fp1
, fs
);
10331 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10332 tcg_temp_free_i64(fp1
);
10333 gen_store_fpr64(ctx
, fp0
, fd
);
10334 tcg_temp_free_i64(fp0
);
10337 case OPC_RECIP2_PS
:
10340 TCGv_i64 fp0
= tcg_temp_new_i64();
10341 TCGv_i64 fp1
= tcg_temp_new_i64();
10343 gen_load_fpr64(ctx
, fp0
, fs
);
10344 gen_load_fpr64(ctx
, fp1
, ft
);
10345 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10346 tcg_temp_free_i64(fp1
);
10347 gen_store_fpr64(ctx
, fp0
, fd
);
10348 tcg_temp_free_i64(fp0
);
10351 case OPC_RECIP1_PS
:
10354 TCGv_i64 fp0
= tcg_temp_new_i64();
10356 gen_load_fpr64(ctx
, fp0
, fs
);
10357 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10358 gen_store_fpr64(ctx
, fp0
, fd
);
10359 tcg_temp_free_i64(fp0
);
10362 case OPC_RSQRT1_PS
:
10365 TCGv_i64 fp0
= tcg_temp_new_i64();
10367 gen_load_fpr64(ctx
, fp0
, fs
);
10368 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10369 gen_store_fpr64(ctx
, fp0
, fd
);
10370 tcg_temp_free_i64(fp0
);
10373 case OPC_RSQRT2_PS
:
10376 TCGv_i64 fp0
= tcg_temp_new_i64();
10377 TCGv_i64 fp1
= tcg_temp_new_i64();
10379 gen_load_fpr64(ctx
, fp0
, fs
);
10380 gen_load_fpr64(ctx
, fp1
, ft
);
10381 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10382 tcg_temp_free_i64(fp1
);
10383 gen_store_fpr64(ctx
, fp0
, fd
);
10384 tcg_temp_free_i64(fp0
);
10388 check_cp1_64bitmode(ctx
);
10390 TCGv_i32 fp0
= tcg_temp_new_i32();
10392 gen_load_fpr32h(ctx
, fp0
, fs
);
10393 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10394 gen_store_fpr32(ctx
, fp0
, fd
);
10395 tcg_temp_free_i32(fp0
);
10398 case OPC_CVT_PW_PS
:
10401 TCGv_i64 fp0
= tcg_temp_new_i64();
10403 gen_load_fpr64(ctx
, fp0
, fs
);
10404 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10405 gen_store_fpr64(ctx
, fp0
, fd
);
10406 tcg_temp_free_i64(fp0
);
10410 check_cp1_64bitmode(ctx
);
10412 TCGv_i32 fp0
= tcg_temp_new_i32();
10414 gen_load_fpr32(ctx
, fp0
, fs
);
10415 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10416 gen_store_fpr32(ctx
, fp0
, fd
);
10417 tcg_temp_free_i32(fp0
);
10423 TCGv_i32 fp0
= tcg_temp_new_i32();
10424 TCGv_i32 fp1
= tcg_temp_new_i32();
10426 gen_load_fpr32(ctx
, fp0
, fs
);
10427 gen_load_fpr32(ctx
, fp1
, ft
);
10428 gen_store_fpr32h(ctx
, fp0
, fd
);
10429 gen_store_fpr32(ctx
, fp1
, fd
);
10430 tcg_temp_free_i32(fp0
);
10431 tcg_temp_free_i32(fp1
);
10437 TCGv_i32 fp0
= tcg_temp_new_i32();
10438 TCGv_i32 fp1
= tcg_temp_new_i32();
10440 gen_load_fpr32(ctx
, fp0
, fs
);
10441 gen_load_fpr32h(ctx
, fp1
, ft
);
10442 gen_store_fpr32(ctx
, fp1
, fd
);
10443 gen_store_fpr32h(ctx
, fp0
, fd
);
10444 tcg_temp_free_i32(fp0
);
10445 tcg_temp_free_i32(fp1
);
10451 TCGv_i32 fp0
= tcg_temp_new_i32();
10452 TCGv_i32 fp1
= tcg_temp_new_i32();
10454 gen_load_fpr32h(ctx
, fp0
, fs
);
10455 gen_load_fpr32(ctx
, fp1
, ft
);
10456 gen_store_fpr32(ctx
, fp1
, fd
);
10457 gen_store_fpr32h(ctx
, fp0
, fd
);
10458 tcg_temp_free_i32(fp0
);
10459 tcg_temp_free_i32(fp1
);
10465 TCGv_i32 fp0
= tcg_temp_new_i32();
10466 TCGv_i32 fp1
= tcg_temp_new_i32();
10468 gen_load_fpr32h(ctx
, fp0
, fs
);
10469 gen_load_fpr32h(ctx
, fp1
, ft
);
10470 gen_store_fpr32(ctx
, fp1
, fd
);
10471 gen_store_fpr32h(ctx
, fp0
, fd
);
10472 tcg_temp_free_i32(fp0
);
10473 tcg_temp_free_i32(fp1
);
10477 case OPC_CMP_UN_PS
:
10478 case OPC_CMP_EQ_PS
:
10479 case OPC_CMP_UEQ_PS
:
10480 case OPC_CMP_OLT_PS
:
10481 case OPC_CMP_ULT_PS
:
10482 case OPC_CMP_OLE_PS
:
10483 case OPC_CMP_ULE_PS
:
10484 case OPC_CMP_SF_PS
:
10485 case OPC_CMP_NGLE_PS
:
10486 case OPC_CMP_SEQ_PS
:
10487 case OPC_CMP_NGL_PS
:
10488 case OPC_CMP_LT_PS
:
10489 case OPC_CMP_NGE_PS
:
10490 case OPC_CMP_LE_PS
:
10491 case OPC_CMP_NGT_PS
:
10492 if (ctx
->opcode
& (1 << 6)) {
10493 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10495 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10499 MIPS_INVAL("farith");
10500 generate_exception_end(ctx
, EXCP_RI
);
10505 /* Coprocessor 3 (FPU) */
10506 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10507 int fd
, int fs
, int base
, int index
)
10509 TCGv t0
= tcg_temp_new();
10512 gen_load_gpr(t0
, index
);
10513 } else if (index
== 0) {
10514 gen_load_gpr(t0
, base
);
10516 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10518 /* Don't do NOP if destination is zero: we must perform the actual
10524 TCGv_i32 fp0
= tcg_temp_new_i32();
10526 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10527 tcg_gen_trunc_tl_i32(fp0
, t0
);
10528 gen_store_fpr32(ctx
, fp0
, fd
);
10529 tcg_temp_free_i32(fp0
);
10534 check_cp1_registers(ctx
, fd
);
10536 TCGv_i64 fp0
= tcg_temp_new_i64();
10537 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10538 gen_store_fpr64(ctx
, fp0
, fd
);
10539 tcg_temp_free_i64(fp0
);
10543 check_cp1_64bitmode(ctx
);
10544 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10546 TCGv_i64 fp0
= tcg_temp_new_i64();
10548 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10549 gen_store_fpr64(ctx
, fp0
, fd
);
10550 tcg_temp_free_i64(fp0
);
10556 TCGv_i32 fp0
= tcg_temp_new_i32();
10557 gen_load_fpr32(ctx
, fp0
, fs
);
10558 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10559 tcg_temp_free_i32(fp0
);
10564 check_cp1_registers(ctx
, fs
);
10566 TCGv_i64 fp0
= tcg_temp_new_i64();
10567 gen_load_fpr64(ctx
, fp0
, fs
);
10568 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10569 tcg_temp_free_i64(fp0
);
10573 check_cp1_64bitmode(ctx
);
10574 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10576 TCGv_i64 fp0
= tcg_temp_new_i64();
10577 gen_load_fpr64(ctx
, fp0
, fs
);
10578 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10579 tcg_temp_free_i64(fp0
);
10586 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10587 int fd
, int fr
, int fs
, int ft
)
10593 TCGv t0
= tcg_temp_local_new();
10594 TCGv_i32 fp
= tcg_temp_new_i32();
10595 TCGv_i32 fph
= tcg_temp_new_i32();
10596 TCGLabel
*l1
= gen_new_label();
10597 TCGLabel
*l2
= gen_new_label();
10599 gen_load_gpr(t0
, fr
);
10600 tcg_gen_andi_tl(t0
, t0
, 0x7);
10602 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10603 gen_load_fpr32(ctx
, fp
, fs
);
10604 gen_load_fpr32h(ctx
, fph
, fs
);
10605 gen_store_fpr32(ctx
, fp
, fd
);
10606 gen_store_fpr32h(ctx
, fph
, fd
);
10609 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10611 #ifdef TARGET_WORDS_BIGENDIAN
10612 gen_load_fpr32(ctx
, fp
, fs
);
10613 gen_load_fpr32h(ctx
, fph
, ft
);
10614 gen_store_fpr32h(ctx
, fp
, fd
);
10615 gen_store_fpr32(ctx
, fph
, fd
);
10617 gen_load_fpr32h(ctx
, fph
, fs
);
10618 gen_load_fpr32(ctx
, fp
, ft
);
10619 gen_store_fpr32(ctx
, fph
, fd
);
10620 gen_store_fpr32h(ctx
, fp
, fd
);
10623 tcg_temp_free_i32(fp
);
10624 tcg_temp_free_i32(fph
);
10630 TCGv_i32 fp0
= tcg_temp_new_i32();
10631 TCGv_i32 fp1
= tcg_temp_new_i32();
10632 TCGv_i32 fp2
= tcg_temp_new_i32();
10634 gen_load_fpr32(ctx
, fp0
, fs
);
10635 gen_load_fpr32(ctx
, fp1
, ft
);
10636 gen_load_fpr32(ctx
, fp2
, fr
);
10637 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10638 tcg_temp_free_i32(fp0
);
10639 tcg_temp_free_i32(fp1
);
10640 gen_store_fpr32(ctx
, fp2
, fd
);
10641 tcg_temp_free_i32(fp2
);
10646 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10648 TCGv_i64 fp0
= tcg_temp_new_i64();
10649 TCGv_i64 fp1
= tcg_temp_new_i64();
10650 TCGv_i64 fp2
= tcg_temp_new_i64();
10652 gen_load_fpr64(ctx
, fp0
, fs
);
10653 gen_load_fpr64(ctx
, fp1
, ft
);
10654 gen_load_fpr64(ctx
, fp2
, fr
);
10655 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10656 tcg_temp_free_i64(fp0
);
10657 tcg_temp_free_i64(fp1
);
10658 gen_store_fpr64(ctx
, fp2
, fd
);
10659 tcg_temp_free_i64(fp2
);
10665 TCGv_i64 fp0
= tcg_temp_new_i64();
10666 TCGv_i64 fp1
= tcg_temp_new_i64();
10667 TCGv_i64 fp2
= tcg_temp_new_i64();
10669 gen_load_fpr64(ctx
, fp0
, fs
);
10670 gen_load_fpr64(ctx
, fp1
, ft
);
10671 gen_load_fpr64(ctx
, fp2
, fr
);
10672 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10673 tcg_temp_free_i64(fp0
);
10674 tcg_temp_free_i64(fp1
);
10675 gen_store_fpr64(ctx
, fp2
, fd
);
10676 tcg_temp_free_i64(fp2
);
10682 TCGv_i32 fp0
= tcg_temp_new_i32();
10683 TCGv_i32 fp1
= tcg_temp_new_i32();
10684 TCGv_i32 fp2
= tcg_temp_new_i32();
10686 gen_load_fpr32(ctx
, fp0
, fs
);
10687 gen_load_fpr32(ctx
, fp1
, ft
);
10688 gen_load_fpr32(ctx
, fp2
, fr
);
10689 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10690 tcg_temp_free_i32(fp0
);
10691 tcg_temp_free_i32(fp1
);
10692 gen_store_fpr32(ctx
, fp2
, fd
);
10693 tcg_temp_free_i32(fp2
);
10698 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10700 TCGv_i64 fp0
= tcg_temp_new_i64();
10701 TCGv_i64 fp1
= tcg_temp_new_i64();
10702 TCGv_i64 fp2
= tcg_temp_new_i64();
10704 gen_load_fpr64(ctx
, fp0
, fs
);
10705 gen_load_fpr64(ctx
, fp1
, ft
);
10706 gen_load_fpr64(ctx
, fp2
, fr
);
10707 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10708 tcg_temp_free_i64(fp0
);
10709 tcg_temp_free_i64(fp1
);
10710 gen_store_fpr64(ctx
, fp2
, fd
);
10711 tcg_temp_free_i64(fp2
);
10717 TCGv_i64 fp0
= tcg_temp_new_i64();
10718 TCGv_i64 fp1
= tcg_temp_new_i64();
10719 TCGv_i64 fp2
= tcg_temp_new_i64();
10721 gen_load_fpr64(ctx
, fp0
, fs
);
10722 gen_load_fpr64(ctx
, fp1
, ft
);
10723 gen_load_fpr64(ctx
, fp2
, fr
);
10724 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10725 tcg_temp_free_i64(fp0
);
10726 tcg_temp_free_i64(fp1
);
10727 gen_store_fpr64(ctx
, fp2
, fd
);
10728 tcg_temp_free_i64(fp2
);
10734 TCGv_i32 fp0
= tcg_temp_new_i32();
10735 TCGv_i32 fp1
= tcg_temp_new_i32();
10736 TCGv_i32 fp2
= tcg_temp_new_i32();
10738 gen_load_fpr32(ctx
, fp0
, fs
);
10739 gen_load_fpr32(ctx
, fp1
, ft
);
10740 gen_load_fpr32(ctx
, fp2
, fr
);
10741 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10742 tcg_temp_free_i32(fp0
);
10743 tcg_temp_free_i32(fp1
);
10744 gen_store_fpr32(ctx
, fp2
, fd
);
10745 tcg_temp_free_i32(fp2
);
10750 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10752 TCGv_i64 fp0
= tcg_temp_new_i64();
10753 TCGv_i64 fp1
= tcg_temp_new_i64();
10754 TCGv_i64 fp2
= tcg_temp_new_i64();
10756 gen_load_fpr64(ctx
, fp0
, fs
);
10757 gen_load_fpr64(ctx
, fp1
, ft
);
10758 gen_load_fpr64(ctx
, fp2
, fr
);
10759 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10760 tcg_temp_free_i64(fp0
);
10761 tcg_temp_free_i64(fp1
);
10762 gen_store_fpr64(ctx
, fp2
, fd
);
10763 tcg_temp_free_i64(fp2
);
10769 TCGv_i64 fp0
= tcg_temp_new_i64();
10770 TCGv_i64 fp1
= tcg_temp_new_i64();
10771 TCGv_i64 fp2
= tcg_temp_new_i64();
10773 gen_load_fpr64(ctx
, fp0
, fs
);
10774 gen_load_fpr64(ctx
, fp1
, ft
);
10775 gen_load_fpr64(ctx
, fp2
, fr
);
10776 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10777 tcg_temp_free_i64(fp0
);
10778 tcg_temp_free_i64(fp1
);
10779 gen_store_fpr64(ctx
, fp2
, fd
);
10780 tcg_temp_free_i64(fp2
);
10786 TCGv_i32 fp0
= tcg_temp_new_i32();
10787 TCGv_i32 fp1
= tcg_temp_new_i32();
10788 TCGv_i32 fp2
= tcg_temp_new_i32();
10790 gen_load_fpr32(ctx
, fp0
, fs
);
10791 gen_load_fpr32(ctx
, fp1
, ft
);
10792 gen_load_fpr32(ctx
, fp2
, fr
);
10793 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10794 tcg_temp_free_i32(fp0
);
10795 tcg_temp_free_i32(fp1
);
10796 gen_store_fpr32(ctx
, fp2
, fd
);
10797 tcg_temp_free_i32(fp2
);
10802 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10804 TCGv_i64 fp0
= tcg_temp_new_i64();
10805 TCGv_i64 fp1
= tcg_temp_new_i64();
10806 TCGv_i64 fp2
= tcg_temp_new_i64();
10808 gen_load_fpr64(ctx
, fp0
, fs
);
10809 gen_load_fpr64(ctx
, fp1
, ft
);
10810 gen_load_fpr64(ctx
, fp2
, fr
);
10811 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10812 tcg_temp_free_i64(fp0
);
10813 tcg_temp_free_i64(fp1
);
10814 gen_store_fpr64(ctx
, fp2
, fd
);
10815 tcg_temp_free_i64(fp2
);
10821 TCGv_i64 fp0
= tcg_temp_new_i64();
10822 TCGv_i64 fp1
= tcg_temp_new_i64();
10823 TCGv_i64 fp2
= tcg_temp_new_i64();
10825 gen_load_fpr64(ctx
, fp0
, fs
);
10826 gen_load_fpr64(ctx
, fp1
, ft
);
10827 gen_load_fpr64(ctx
, fp2
, fr
);
10828 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10829 tcg_temp_free_i64(fp0
);
10830 tcg_temp_free_i64(fp1
);
10831 gen_store_fpr64(ctx
, fp2
, fd
);
10832 tcg_temp_free_i64(fp2
);
10836 MIPS_INVAL("flt3_arith");
10837 generate_exception_end(ctx
, EXCP_RI
);
10842 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10846 #if !defined(CONFIG_USER_ONLY)
10847 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10848 Therefore only check the ISA in system mode. */
10849 check_insn(ctx
, ISA_MIPS32R2
);
10851 t0
= tcg_temp_new();
10855 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10856 gen_store_gpr(t0
, rt
);
10859 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10860 gen_store_gpr(t0
, rt
);
10863 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10866 gen_helper_rdhwr_cc(t0
, cpu_env
);
10867 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10870 gen_store_gpr(t0
, rt
);
10871 /* Break the TB to be able to take timer interrupts immediately
10872 after reading count. DISAS_STOP isn't sufficient, we need to ensure
10873 we break completely out of translated code. */
10874 gen_save_pc(ctx
->base
.pc_next
+ 4);
10875 ctx
->base
.is_jmp
= DISAS_EXIT
;
10878 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10879 gen_store_gpr(t0
, rt
);
10882 check_insn(ctx
, ISA_MIPS32R6
);
10884 /* Performance counter registers are not implemented other than
10885 * control register 0.
10887 generate_exception(ctx
, EXCP_RI
);
10889 gen_helper_rdhwr_performance(t0
, cpu_env
);
10890 gen_store_gpr(t0
, rt
);
10893 check_insn(ctx
, ISA_MIPS32R6
);
10894 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10895 gen_store_gpr(t0
, rt
);
10898 #if defined(CONFIG_USER_ONLY)
10899 tcg_gen_ld_tl(t0
, cpu_env
,
10900 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10901 gen_store_gpr(t0
, rt
);
10904 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10905 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10906 tcg_gen_ld_tl(t0
, cpu_env
,
10907 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10908 gen_store_gpr(t0
, rt
);
10910 generate_exception_end(ctx
, EXCP_RI
);
10914 default: /* Invalid */
10915 MIPS_INVAL("rdhwr");
10916 generate_exception_end(ctx
, EXCP_RI
);
10922 static inline void clear_branch_hflags(DisasContext
*ctx
)
10924 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10925 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
10926 save_cpu_state(ctx
, 0);
10928 /* it is not safe to save ctx->hflags as hflags may be changed
10929 in execution time by the instruction in delay / forbidden slot. */
10930 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10934 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10936 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10937 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10938 /* Branches completion */
10939 clear_branch_hflags(ctx
);
10940 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10941 /* FIXME: Need to clear can_do_io. */
10942 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10943 case MIPS_HFLAG_FBNSLOT
:
10944 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
10947 /* unconditional branch */
10948 if (proc_hflags
& MIPS_HFLAG_BX
) {
10949 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10951 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10953 case MIPS_HFLAG_BL
:
10954 /* blikely taken case */
10955 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10957 case MIPS_HFLAG_BC
:
10958 /* Conditional branch */
10960 TCGLabel
*l1
= gen_new_label();
10962 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10963 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
10965 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10968 case MIPS_HFLAG_BR
:
10969 /* unconditional branch to register */
10970 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10971 TCGv t0
= tcg_temp_new();
10972 TCGv_i32 t1
= tcg_temp_new_i32();
10974 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10975 tcg_gen_trunc_tl_i32(t1
, t0
);
10977 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10978 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10979 tcg_gen_or_i32(hflags
, hflags
, t1
);
10980 tcg_temp_free_i32(t1
);
10982 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10984 tcg_gen_mov_tl(cpu_PC
, btarget
);
10986 if (ctx
->base
.singlestep_enabled
) {
10987 save_cpu_state(ctx
, 0);
10988 gen_helper_raise_exception_debug(cpu_env
);
10990 tcg_gen_lookup_and_goto_ptr();
10993 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10999 /* Compact Branches */
11000 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
11001 int rs
, int rt
, int32_t offset
)
11003 int bcond_compute
= 0;
11004 TCGv t0
= tcg_temp_new();
11005 TCGv t1
= tcg_temp_new();
11006 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
11008 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11009 #ifdef MIPS_DEBUG_DISAS
11010 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
11011 "\n", ctx
->base
.pc_next
);
11013 generate_exception_end(ctx
, EXCP_RI
);
11017 /* Load needed operands and calculate btarget */
11019 /* compact branch */
11020 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11021 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11022 gen_load_gpr(t0
, rs
);
11023 gen_load_gpr(t1
, rt
);
11025 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11026 if (rs
<= rt
&& rs
== 0) {
11027 /* OPC_BEQZALC, OPC_BNEZALC */
11028 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11031 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11032 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11033 gen_load_gpr(t0
, rs
);
11034 gen_load_gpr(t1
, rt
);
11036 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11038 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11039 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11040 if (rs
== 0 || rs
== rt
) {
11041 /* OPC_BLEZALC, OPC_BGEZALC */
11042 /* OPC_BGTZALC, OPC_BLTZALC */
11043 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11045 gen_load_gpr(t0
, rs
);
11046 gen_load_gpr(t1
, rt
);
11048 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11052 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11057 /* OPC_BEQZC, OPC_BNEZC */
11058 gen_load_gpr(t0
, rs
);
11060 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11062 /* OPC_JIC, OPC_JIALC */
11063 TCGv tbase
= tcg_temp_new();
11064 TCGv toffset
= tcg_temp_new();
11066 gen_load_gpr(tbase
, rt
);
11067 tcg_gen_movi_tl(toffset
, offset
);
11068 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
11069 tcg_temp_free(tbase
);
11070 tcg_temp_free(toffset
);
11074 MIPS_INVAL("Compact branch/jump");
11075 generate_exception_end(ctx
, EXCP_RI
);
11079 if (bcond_compute
== 0) {
11080 /* Uncoditional compact branch */
11083 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11086 ctx
->hflags
|= MIPS_HFLAG_BR
;
11089 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11092 ctx
->hflags
|= MIPS_HFLAG_B
;
11095 MIPS_INVAL("Compact branch/jump");
11096 generate_exception_end(ctx
, EXCP_RI
);
11100 /* Generating branch here as compact branches don't have delay slot */
11101 gen_branch(ctx
, 4);
11103 /* Conditional compact branch */
11104 TCGLabel
*fs
= gen_new_label();
11105 save_cpu_state(ctx
, 0);
11108 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11109 if (rs
== 0 && rt
!= 0) {
11111 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11112 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11114 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11117 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11120 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11121 if (rs
== 0 && rt
!= 0) {
11123 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11124 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11126 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11129 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11132 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11133 if (rs
== 0 && rt
!= 0) {
11135 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11136 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11138 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11141 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11144 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11145 if (rs
== 0 && rt
!= 0) {
11147 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11148 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11150 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11153 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11156 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11157 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11159 /* OPC_BOVC, OPC_BNVC */
11160 TCGv t2
= tcg_temp_new();
11161 TCGv t3
= tcg_temp_new();
11162 TCGv t4
= tcg_temp_new();
11163 TCGv input_overflow
= tcg_temp_new();
11165 gen_load_gpr(t0
, rs
);
11166 gen_load_gpr(t1
, rt
);
11167 tcg_gen_ext32s_tl(t2
, t0
);
11168 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11169 tcg_gen_ext32s_tl(t3
, t1
);
11170 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11171 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11173 tcg_gen_add_tl(t4
, t2
, t3
);
11174 tcg_gen_ext32s_tl(t4
, t4
);
11175 tcg_gen_xor_tl(t2
, t2
, t3
);
11176 tcg_gen_xor_tl(t3
, t4
, t3
);
11177 tcg_gen_andc_tl(t2
, t3
, t2
);
11178 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11179 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11180 if (opc
== OPC_BOVC
) {
11182 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11185 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11187 tcg_temp_free(input_overflow
);
11191 } else if (rs
< rt
&& rs
== 0) {
11192 /* OPC_BEQZALC, OPC_BNEZALC */
11193 if (opc
== OPC_BEQZALC
) {
11195 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11198 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11201 /* OPC_BEQC, OPC_BNEC */
11202 if (opc
== OPC_BEQC
) {
11204 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
11207 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
11212 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
11215 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
11218 MIPS_INVAL("Compact conditional branch/jump");
11219 generate_exception_end(ctx
, EXCP_RI
);
11223 /* Generating branch here as compact branches don't have delay slot */
11224 gen_goto_tb(ctx
, 1, ctx
->btarget
);
11227 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
11235 /* ISA extensions (ASEs) */
11236 /* MIPS16 extension to MIPS32 */
11238 /* MIPS16 major opcodes */
11240 M16_OPC_ADDIUSP
= 0x00,
11241 M16_OPC_ADDIUPC
= 0x01,
11243 M16_OPC_JAL
= 0x03,
11244 M16_OPC_BEQZ
= 0x04,
11245 M16_OPC_BNEQZ
= 0x05,
11246 M16_OPC_SHIFT
= 0x06,
11248 M16_OPC_RRIA
= 0x08,
11249 M16_OPC_ADDIU8
= 0x09,
11250 M16_OPC_SLTI
= 0x0a,
11251 M16_OPC_SLTIU
= 0x0b,
11254 M16_OPC_CMPI
= 0x0e,
11258 M16_OPC_LWSP
= 0x12,
11260 M16_OPC_LBU
= 0x14,
11261 M16_OPC_LHU
= 0x15,
11262 M16_OPC_LWPC
= 0x16,
11263 M16_OPC_LWU
= 0x17,
11266 M16_OPC_SWSP
= 0x1a,
11268 M16_OPC_RRR
= 0x1c,
11270 M16_OPC_EXTEND
= 0x1e,
11274 /* I8 funct field */
11293 /* RR funct field */
11327 /* I64 funct field */
11335 I64_DADDIUPC
= 0x6,
11339 /* RR ry field for CNVT */
11341 RR_RY_CNVT_ZEB
= 0x0,
11342 RR_RY_CNVT_ZEH
= 0x1,
11343 RR_RY_CNVT_ZEW
= 0x2,
11344 RR_RY_CNVT_SEB
= 0x4,
11345 RR_RY_CNVT_SEH
= 0x5,
11346 RR_RY_CNVT_SEW
= 0x6,
11349 static int xlat (int r
)
11351 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
11356 static void gen_mips16_save (DisasContext
*ctx
,
11357 int xsregs
, int aregs
,
11358 int do_ra
, int do_s0
, int do_s1
,
11361 TCGv t0
= tcg_temp_new();
11362 TCGv t1
= tcg_temp_new();
11363 TCGv t2
= tcg_temp_new();
11393 generate_exception_end(ctx
, EXCP_RI
);
11399 gen_base_offset_addr(ctx
, t0
, 29, 12);
11400 gen_load_gpr(t1
, 7);
11401 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11404 gen_base_offset_addr(ctx
, t0
, 29, 8);
11405 gen_load_gpr(t1
, 6);
11406 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11409 gen_base_offset_addr(ctx
, t0
, 29, 4);
11410 gen_load_gpr(t1
, 5);
11411 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11414 gen_base_offset_addr(ctx
, t0
, 29, 0);
11415 gen_load_gpr(t1
, 4);
11416 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11419 gen_load_gpr(t0
, 29);
11421 #define DECR_AND_STORE(reg) do { \
11422 tcg_gen_movi_tl(t2, -4); \
11423 gen_op_addr_add(ctx, t0, t0, t2); \
11424 gen_load_gpr(t1, reg); \
11425 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11429 DECR_AND_STORE(31);
11434 DECR_AND_STORE(30);
11437 DECR_AND_STORE(23);
11440 DECR_AND_STORE(22);
11443 DECR_AND_STORE(21);
11446 DECR_AND_STORE(20);
11449 DECR_AND_STORE(19);
11452 DECR_AND_STORE(18);
11456 DECR_AND_STORE(17);
11459 DECR_AND_STORE(16);
11489 generate_exception_end(ctx
, EXCP_RI
);
11505 #undef DECR_AND_STORE
11507 tcg_gen_movi_tl(t2
, -framesize
);
11508 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11514 static void gen_mips16_restore (DisasContext
*ctx
,
11515 int xsregs
, int aregs
,
11516 int do_ra
, int do_s0
, int do_s1
,
11520 TCGv t0
= tcg_temp_new();
11521 TCGv t1
= tcg_temp_new();
11522 TCGv t2
= tcg_temp_new();
11524 tcg_gen_movi_tl(t2
, framesize
);
11525 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11527 #define DECR_AND_LOAD(reg) do { \
11528 tcg_gen_movi_tl(t2, -4); \
11529 gen_op_addr_add(ctx, t0, t0, t2); \
11530 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11531 gen_store_gpr(t1, reg); \
11595 generate_exception_end(ctx
, EXCP_RI
);
11611 #undef DECR_AND_LOAD
11613 tcg_gen_movi_tl(t2
, framesize
);
11614 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11620 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11621 int is_64_bit
, int extended
)
11625 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11626 generate_exception_end(ctx
, EXCP_RI
);
11630 t0
= tcg_temp_new();
11632 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11633 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11635 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11641 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11644 TCGv_i32 t0
= tcg_const_i32(op
);
11645 TCGv t1
= tcg_temp_new();
11646 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11647 gen_helper_cache(cpu_env
, t1
, t0
);
11650 #if defined(TARGET_MIPS64)
11651 static void decode_i64_mips16 (DisasContext
*ctx
,
11652 int ry
, int funct
, int16_t offset
,
11657 check_insn(ctx
, ISA_MIPS3
);
11658 check_mips_64(ctx
);
11659 offset
= extended
? offset
: offset
<< 3;
11660 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11663 check_insn(ctx
, ISA_MIPS3
);
11664 check_mips_64(ctx
);
11665 offset
= extended
? offset
: offset
<< 3;
11666 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11669 check_insn(ctx
, ISA_MIPS3
);
11670 check_mips_64(ctx
);
11671 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11672 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11675 check_insn(ctx
, ISA_MIPS3
);
11676 check_mips_64(ctx
);
11677 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11678 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11681 check_insn(ctx
, ISA_MIPS3
);
11682 check_mips_64(ctx
);
11683 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11684 generate_exception_end(ctx
, EXCP_RI
);
11686 offset
= extended
? offset
: offset
<< 3;
11687 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11691 check_insn(ctx
, ISA_MIPS3
);
11692 check_mips_64(ctx
);
11693 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11694 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11697 check_insn(ctx
, ISA_MIPS3
);
11698 check_mips_64(ctx
);
11699 offset
= extended
? offset
: offset
<< 2;
11700 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11703 check_insn(ctx
, ISA_MIPS3
);
11704 check_mips_64(ctx
);
11705 offset
= extended
? offset
: offset
<< 2;
11706 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11712 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11714 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11715 int op
, rx
, ry
, funct
, sa
;
11716 int16_t imm
, offset
;
11718 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11719 op
= (ctx
->opcode
>> 11) & 0x1f;
11720 sa
= (ctx
->opcode
>> 22) & 0x1f;
11721 funct
= (ctx
->opcode
>> 8) & 0x7;
11722 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11723 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11724 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11725 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11726 | (ctx
->opcode
& 0x1f));
11728 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11731 case M16_OPC_ADDIUSP
:
11732 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11734 case M16_OPC_ADDIUPC
:
11735 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11738 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11739 /* No delay slot, so just process as a normal instruction */
11742 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11743 /* No delay slot, so just process as a normal instruction */
11745 case M16_OPC_BNEQZ
:
11746 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11747 /* No delay slot, so just process as a normal instruction */
11749 case M16_OPC_SHIFT
:
11750 switch (ctx
->opcode
& 0x3) {
11752 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11755 #if defined(TARGET_MIPS64)
11756 check_mips_64(ctx
);
11757 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11759 generate_exception_end(ctx
, EXCP_RI
);
11763 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11766 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11770 #if defined(TARGET_MIPS64)
11772 check_insn(ctx
, ISA_MIPS3
);
11773 check_mips_64(ctx
);
11774 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11778 imm
= ctx
->opcode
& 0xf;
11779 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11780 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11781 imm
= (int16_t) (imm
<< 1) >> 1;
11782 if ((ctx
->opcode
>> 4) & 0x1) {
11783 #if defined(TARGET_MIPS64)
11784 check_mips_64(ctx
);
11785 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11787 generate_exception_end(ctx
, EXCP_RI
);
11790 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11793 case M16_OPC_ADDIU8
:
11794 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11797 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11799 case M16_OPC_SLTIU
:
11800 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11805 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11808 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11811 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11814 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11817 check_insn(ctx
, ISA_MIPS32
);
11819 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11820 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11821 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11822 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11823 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11824 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11825 | (ctx
->opcode
& 0xf)) << 3;
11827 if (ctx
->opcode
& (1 << 7)) {
11828 gen_mips16_save(ctx
, xsregs
, aregs
,
11829 do_ra
, do_s0
, do_s1
,
11832 gen_mips16_restore(ctx
, xsregs
, aregs
,
11833 do_ra
, do_s0
, do_s1
,
11839 generate_exception_end(ctx
, EXCP_RI
);
11844 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11847 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11849 #if defined(TARGET_MIPS64)
11851 check_insn(ctx
, ISA_MIPS3
);
11852 check_mips_64(ctx
);
11853 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11857 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11860 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11863 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11866 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11869 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11872 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11875 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11877 #if defined(TARGET_MIPS64)
11879 check_insn(ctx
, ISA_MIPS3
);
11880 check_mips_64(ctx
);
11881 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11885 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11888 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11891 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11894 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11896 #if defined(TARGET_MIPS64)
11898 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11902 generate_exception_end(ctx
, EXCP_RI
);
11909 static inline bool is_uhi(int sdbbp_code
)
11911 #ifdef CONFIG_USER_ONLY
11914 return semihosting_enabled() && sdbbp_code
== 1;
11918 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11922 int op
, cnvt_op
, op1
, offset
;
11926 op
= (ctx
->opcode
>> 11) & 0x1f;
11927 sa
= (ctx
->opcode
>> 2) & 0x7;
11928 sa
= sa
== 0 ? 8 : sa
;
11929 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11930 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11931 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11932 op1
= offset
= ctx
->opcode
& 0x1f;
11937 case M16_OPC_ADDIUSP
:
11939 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11941 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11944 case M16_OPC_ADDIUPC
:
11945 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11948 offset
= (ctx
->opcode
& 0x7ff) << 1;
11949 offset
= (int16_t)(offset
<< 4) >> 4;
11950 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11951 /* No delay slot, so just process as a normal instruction */
11954 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11955 offset
= (((ctx
->opcode
& 0x1f) << 21)
11956 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11958 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11959 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11963 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11964 ((int8_t)ctx
->opcode
) << 1, 0);
11965 /* No delay slot, so just process as a normal instruction */
11967 case M16_OPC_BNEQZ
:
11968 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11969 ((int8_t)ctx
->opcode
) << 1, 0);
11970 /* No delay slot, so just process as a normal instruction */
11972 case M16_OPC_SHIFT
:
11973 switch (ctx
->opcode
& 0x3) {
11975 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11978 #if defined(TARGET_MIPS64)
11979 check_insn(ctx
, ISA_MIPS3
);
11980 check_mips_64(ctx
);
11981 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11983 generate_exception_end(ctx
, EXCP_RI
);
11987 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11990 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11994 #if defined(TARGET_MIPS64)
11996 check_insn(ctx
, ISA_MIPS3
);
11997 check_mips_64(ctx
);
11998 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
12003 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
12005 if ((ctx
->opcode
>> 4) & 1) {
12006 #if defined(TARGET_MIPS64)
12007 check_insn(ctx
, ISA_MIPS3
);
12008 check_mips_64(ctx
);
12009 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
12011 generate_exception_end(ctx
, EXCP_RI
);
12014 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
12018 case M16_OPC_ADDIU8
:
12020 int16_t imm
= (int8_t) ctx
->opcode
;
12022 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
12027 int16_t imm
= (uint8_t) ctx
->opcode
;
12028 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
12031 case M16_OPC_SLTIU
:
12033 int16_t imm
= (uint8_t) ctx
->opcode
;
12034 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
12041 funct
= (ctx
->opcode
>> 8) & 0x7;
12044 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
12045 ((int8_t)ctx
->opcode
) << 1, 0);
12048 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
12049 ((int8_t)ctx
->opcode
) << 1, 0);
12052 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
12055 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
12056 ((int8_t)ctx
->opcode
) << 3);
12059 check_insn(ctx
, ISA_MIPS32
);
12061 int do_ra
= ctx
->opcode
& (1 << 6);
12062 int do_s0
= ctx
->opcode
& (1 << 5);
12063 int do_s1
= ctx
->opcode
& (1 << 4);
12064 int framesize
= ctx
->opcode
& 0xf;
12066 if (framesize
== 0) {
12069 framesize
= framesize
<< 3;
12072 if (ctx
->opcode
& (1 << 7)) {
12073 gen_mips16_save(ctx
, 0, 0,
12074 do_ra
, do_s0
, do_s1
, framesize
);
12076 gen_mips16_restore(ctx
, 0, 0,
12077 do_ra
, do_s0
, do_s1
, framesize
);
12083 int rz
= xlat(ctx
->opcode
& 0x7);
12085 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
12086 ((ctx
->opcode
>> 5) & 0x7);
12087 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
12091 reg32
= ctx
->opcode
& 0x1f;
12092 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
12095 generate_exception_end(ctx
, EXCP_RI
);
12102 int16_t imm
= (uint8_t) ctx
->opcode
;
12104 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12109 int16_t imm
= (uint8_t) ctx
->opcode
;
12110 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12113 #if defined(TARGET_MIPS64)
12115 check_insn(ctx
, ISA_MIPS3
);
12116 check_mips_64(ctx
);
12117 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12121 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12124 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12127 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12130 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12133 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12136 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12139 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12141 #if defined (TARGET_MIPS64)
12143 check_insn(ctx
, ISA_MIPS3
);
12144 check_mips_64(ctx
);
12145 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12149 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12152 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12155 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12158 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12162 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12165 switch (ctx
->opcode
& 0x3) {
12167 mips32_op
= OPC_ADDU
;
12170 mips32_op
= OPC_SUBU
;
12172 #if defined(TARGET_MIPS64)
12174 mips32_op
= OPC_DADDU
;
12175 check_insn(ctx
, ISA_MIPS3
);
12176 check_mips_64(ctx
);
12179 mips32_op
= OPC_DSUBU
;
12180 check_insn(ctx
, ISA_MIPS3
);
12181 check_mips_64(ctx
);
12185 generate_exception_end(ctx
, EXCP_RI
);
12189 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12198 int nd
= (ctx
->opcode
>> 7) & 0x1;
12199 int link
= (ctx
->opcode
>> 6) & 0x1;
12200 int ra
= (ctx
->opcode
>> 5) & 0x1;
12203 check_insn(ctx
, ISA_MIPS32
);
12212 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12217 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12218 gen_helper_do_semihosting(cpu_env
);
12220 /* XXX: not clear which exception should be raised
12221 * when in debug mode...
12223 check_insn(ctx
, ISA_MIPS32
);
12224 generate_exception_end(ctx
, EXCP_DBp
);
12228 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12231 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12234 generate_exception_end(ctx
, EXCP_BREAK
);
12237 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12240 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12243 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12245 #if defined (TARGET_MIPS64)
12247 check_insn(ctx
, ISA_MIPS3
);
12248 check_mips_64(ctx
);
12249 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12253 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12256 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12259 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12262 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12265 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12268 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12271 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12274 check_insn(ctx
, ISA_MIPS32
);
12276 case RR_RY_CNVT_ZEB
:
12277 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12279 case RR_RY_CNVT_ZEH
:
12280 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12282 case RR_RY_CNVT_SEB
:
12283 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12285 case RR_RY_CNVT_SEH
:
12286 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12288 #if defined (TARGET_MIPS64)
12289 case RR_RY_CNVT_ZEW
:
12290 check_insn(ctx
, ISA_MIPS64
);
12291 check_mips_64(ctx
);
12292 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12294 case RR_RY_CNVT_SEW
:
12295 check_insn(ctx
, ISA_MIPS64
);
12296 check_mips_64(ctx
);
12297 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12301 generate_exception_end(ctx
, EXCP_RI
);
12306 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12308 #if defined (TARGET_MIPS64)
12310 check_insn(ctx
, ISA_MIPS3
);
12311 check_mips_64(ctx
);
12312 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12315 check_insn(ctx
, ISA_MIPS3
);
12316 check_mips_64(ctx
);
12317 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12320 check_insn(ctx
, ISA_MIPS3
);
12321 check_mips_64(ctx
);
12322 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12325 check_insn(ctx
, ISA_MIPS3
);
12326 check_mips_64(ctx
);
12327 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12331 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12334 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12337 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12340 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12342 #if defined (TARGET_MIPS64)
12344 check_insn(ctx
, ISA_MIPS3
);
12345 check_mips_64(ctx
);
12346 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12349 check_insn(ctx
, ISA_MIPS3
);
12350 check_mips_64(ctx
);
12351 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12354 check_insn(ctx
, ISA_MIPS3
);
12355 check_mips_64(ctx
);
12356 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12359 check_insn(ctx
, ISA_MIPS3
);
12360 check_mips_64(ctx
);
12361 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12365 generate_exception_end(ctx
, EXCP_RI
);
12369 case M16_OPC_EXTEND
:
12370 decode_extended_mips16_opc(env
, ctx
);
12373 #if defined(TARGET_MIPS64)
12375 funct
= (ctx
->opcode
>> 8) & 0x7;
12376 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12380 generate_exception_end(ctx
, EXCP_RI
);
12387 /* microMIPS extension to MIPS32/MIPS64 */
12390 * microMIPS32/microMIPS64 major opcodes
12392 * 1. MIPS Architecture for Programmers Volume II-B:
12393 * The microMIPS32 Instruction Set (Revision 3.05)
12395 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12397 * 2. MIPS Architecture For Programmers Volume II-A:
12398 * The MIPS64 Instruction Set (Revision 3.51)
12428 POOL32S
= 0x16, /* MIPS64 */
12429 DADDIU32
= 0x17, /* MIPS64 */
12458 /* 0x29 is reserved */
12471 /* 0x31 is reserved */
12484 SD32
= 0x36, /* MIPS64 */
12485 LD32
= 0x37, /* MIPS64 */
12487 /* 0x39 is reserved */
12503 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12525 /* POOL32A encoding of minor opcode field */
12528 /* These opcodes are distinguished only by bits 9..6; those bits are
12529 * what are recorded below. */
12566 /* The following can be distinguished by their lower 6 bits. */
12576 /* POOL32AXF encoding of minor opcode field extension */
12579 * 1. MIPS Architecture for Programmers Volume II-B:
12580 * The microMIPS32 Instruction Set (Revision 3.05)
12582 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12584 * 2. MIPS Architecture for Programmers VolumeIV-e:
12585 * The MIPS DSP Application-Specific Extension
12586 * to the microMIPS32 Architecture (Revision 2.34)
12588 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12603 /* begin of microMIPS32 DSP */
12605 /* bits 13..12 for 0x01 */
12611 /* bits 13..12 for 0x2a */
12617 /* bits 13..12 for 0x32 */
12621 /* end of microMIPS32 DSP */
12623 /* bits 15..12 for 0x2c */
12640 /* bits 15..12 for 0x34 */
12648 /* bits 15..12 for 0x3c */
12650 JR
= 0x0, /* alias */
12658 /* bits 15..12 for 0x05 */
12662 /* bits 15..12 for 0x0d */
12674 /* bits 15..12 for 0x15 */
12680 /* bits 15..12 for 0x1d */
12684 /* bits 15..12 for 0x2d */
12689 /* bits 15..12 for 0x35 */
12696 /* POOL32B encoding of minor opcode field (bits 15..12) */
12712 /* POOL32C encoding of minor opcode field (bits 15..12) */
12733 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
12746 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
12759 /* POOL32F encoding of minor opcode field (bits 5..0) */
12762 /* These are the bit 7..6 values */
12771 /* These are the bit 8..6 values */
12796 MOVZ_FMT_05
= 0x05,
12830 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12837 /* POOL32Fxf encoding of minor opcode extension field */
12875 /* POOL32I encoding of minor opcode field (bits 25..21) */
12905 /* These overlap and are distinguished by bit16 of the instruction */
12914 /* POOL16A encoding of minor opcode field */
12921 /* POOL16B encoding of minor opcode field */
12928 /* POOL16C encoding of minor opcode field */
12948 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12972 /* POOL16D encoding of minor opcode field */
12979 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit microMIPS register field to the full GPR number. */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Used for 16-bit store instructions: like mmreg() but encoding 0
   selects $0 instead of $16. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
13001 #define uMIPS_RD(op) ((op >> 7) & 0x7)
13002 #define uMIPS_RS(op) ((op >> 4) & 0x7)
13003 #define uMIPS_RS2(op) uMIPS_RS(op)
13004 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
13005 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
13006 #define uMIPS_RS5(op) (op & 0x1f)
13008 /* Signed immediate */
13009 #define SIMM(op, start, width) \
13010 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
13013 /* Zero-extended immediate */
13014 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
13016 static void gen_addiur1sp(DisasContext
*ctx
)
13018 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
13020 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
13023 static void gen_addiur2(DisasContext
*ctx
)
13025 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
13026 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
13027 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
13029 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
13032 static void gen_addiusp(DisasContext
*ctx
)
13034 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
13037 if (encoded
<= 1) {
13038 decoded
= 256 + encoded
;
13039 } else if (encoded
<= 255) {
13041 } else if (encoded
<= 509) {
13042 decoded
= encoded
- 512;
13044 decoded
= encoded
- 768;
13047 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
13050 static void gen_addius5(DisasContext
*ctx
)
13052 int imm
= SIMM(ctx
->opcode
, 1, 4);
13053 int rd
= (ctx
->opcode
>> 5) & 0x1f;
13055 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
13058 static void gen_andi16(DisasContext
*ctx
)
13060 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
13061 31, 32, 63, 64, 255, 32768, 65535 };
13062 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
13063 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
13064 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
13066 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
13069 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
13070 int base
, int16_t offset
)
13075 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
13076 generate_exception_end(ctx
, EXCP_RI
);
13080 t0
= tcg_temp_new();
13082 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13084 t1
= tcg_const_tl(reglist
);
13085 t2
= tcg_const_i32(ctx
->mem_idx
);
13087 save_cpu_state(ctx
, 1);
13090 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
13093 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
13095 #ifdef TARGET_MIPS64
13097 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
13100 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
13106 tcg_temp_free_i32(t2
);
13110 static void gen_pool16c_insn(DisasContext
*ctx
)
13112 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
13113 int rs
= mmreg(ctx
->opcode
& 0x7);
13115 switch (((ctx
->opcode
) >> 4) & 0x3f) {
13120 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13126 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13132 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13138 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13145 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13146 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13148 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13157 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13158 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13160 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13167 int reg
= ctx
->opcode
& 0x1f;
13169 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13175 int reg
= ctx
->opcode
& 0x1f;
13176 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13177 /* Let normal delay slot handling in our caller take us
13178 to the branch target. */
13183 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13184 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13188 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13189 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13193 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13197 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13200 generate_exception_end(ctx
, EXCP_BREAK
);
13203 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13204 gen_helper_do_semihosting(cpu_env
);
13206 /* XXX: not clear which exception should be raised
13207 * when in debug mode...
13209 check_insn(ctx
, ISA_MIPS32
);
13210 generate_exception_end(ctx
, EXCP_DBp
);
13213 case JRADDIUSP
+ 0:
13214 case JRADDIUSP
+ 1:
13216 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13217 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13218 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13219 /* Let normal delay slot handling in our caller take us
13220 to the branch target. */
13224 generate_exception_end(ctx
, EXCP_RI
);
13229 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
13232 int rd
, rs
, re
, rt
;
13233 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
13234 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
13235 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
13236 rd
= rd_enc
[enc_dest
];
13237 re
= re_enc
[enc_dest
];
13238 rs
= rs_rt_enc
[enc_rs
];
13239 rt
= rs_rt_enc
[enc_rt
];
13241 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
13243 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
13246 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
13248 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
13252 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13254 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13255 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13257 switch (ctx
->opcode
& 0xf) {
13259 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13262 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13266 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13267 int offset
= extract32(ctx
->opcode
, 4, 4);
13268 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13271 case R6_JRC16
: /* JRCADDIUSP */
13272 if ((ctx
->opcode
>> 4) & 1) {
13274 int imm
= extract32(ctx
->opcode
, 5, 5);
13275 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13276 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13279 int rs
= extract32(ctx
->opcode
, 5, 5);
13280 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13292 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13293 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13294 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13295 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13299 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13302 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13306 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13307 int offset
= extract32(ctx
->opcode
, 4, 4);
13308 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13311 case JALRC16
: /* BREAK16, SDBBP16 */
13312 switch (ctx
->opcode
& 0x3f) {
13314 case JALRC16
+ 0x20:
13316 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13321 generate_exception(ctx
, EXCP_BREAK
);
13325 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13326 gen_helper_do_semihosting(cpu_env
);
13328 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13329 generate_exception(ctx
, EXCP_RI
);
13331 generate_exception(ctx
, EXCP_DBp
);
13338 generate_exception(ctx
, EXCP_RI
);
13343 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13345 TCGv t0
= tcg_temp_new();
13346 TCGv t1
= tcg_temp_new();
13348 gen_load_gpr(t0
, base
);
13351 gen_load_gpr(t1
, index
);
13352 tcg_gen_shli_tl(t1
, t1
, 2);
13353 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13356 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13357 gen_store_gpr(t1
, rd
);
13363 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13364 int base
, int16_t offset
)
13368 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13369 generate_exception_end(ctx
, EXCP_RI
);
13373 t0
= tcg_temp_new();
13374 t1
= tcg_temp_new();
13376 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13381 generate_exception_end(ctx
, EXCP_RI
);
13384 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13385 gen_store_gpr(t1
, rd
);
13386 tcg_gen_movi_tl(t1
, 4);
13387 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13388 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13389 gen_store_gpr(t1
, rd
+1);
13392 gen_load_gpr(t1
, rd
);
13393 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13394 tcg_gen_movi_tl(t1
, 4);
13395 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13396 gen_load_gpr(t1
, rd
+1);
13397 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13399 #ifdef TARGET_MIPS64
13402 generate_exception_end(ctx
, EXCP_RI
);
13405 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13406 gen_store_gpr(t1
, rd
);
13407 tcg_gen_movi_tl(t1
, 8);
13408 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13409 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13410 gen_store_gpr(t1
, rd
+1);
13413 gen_load_gpr(t1
, rd
);
13414 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13415 tcg_gen_movi_tl(t1
, 8);
13416 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13417 gen_load_gpr(t1
, rd
+1);
13418 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13426 static void gen_sync(int stype
)
13428 TCGBar tcg_mo
= TCG_BAR_SC
;
13431 case 0x4: /* SYNC_WMB */
13432 tcg_mo
|= TCG_MO_ST_ST
;
13434 case 0x10: /* SYNC_MB */
13435 tcg_mo
|= TCG_MO_ALL
;
13437 case 0x11: /* SYNC_ACQUIRE */
13438 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13440 case 0x12: /* SYNC_RELEASE */
13441 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13443 case 0x13: /* SYNC_RMB */
13444 tcg_mo
|= TCG_MO_LD_LD
;
13447 tcg_mo
|= TCG_MO_ALL
;
13451 tcg_gen_mb(tcg_mo
);
13454 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13456 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13457 int minor
= (ctx
->opcode
>> 12) & 0xf;
13458 uint32_t mips32_op
;
13460 switch (extension
) {
13462 mips32_op
= OPC_TEQ
;
13465 mips32_op
= OPC_TGE
;
13468 mips32_op
= OPC_TGEU
;
13471 mips32_op
= OPC_TLT
;
13474 mips32_op
= OPC_TLTU
;
13477 mips32_op
= OPC_TNE
;
13479 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13481 #ifndef CONFIG_USER_ONLY
13484 check_cp0_enabled(ctx
);
13486 /* Treat as NOP. */
13489 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13493 check_cp0_enabled(ctx
);
13495 TCGv t0
= tcg_temp_new();
13497 gen_load_gpr(t0
, rt
);
13498 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13504 switch (minor
& 3) {
13506 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13509 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13512 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13515 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13518 goto pool32axf_invalid
;
13522 switch (minor
& 3) {
13524 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13527 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13530 goto pool32axf_invalid
;
13536 check_insn(ctx
, ISA_MIPS32R6
);
13537 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13540 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13543 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13546 mips32_op
= OPC_CLO
;
13549 mips32_op
= OPC_CLZ
;
13551 check_insn(ctx
, ISA_MIPS32
);
13552 gen_cl(ctx
, mips32_op
, rt
, rs
);
13555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13556 gen_rdhwr(ctx
, rt
, rs
, 0);
13559 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13562 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13563 mips32_op
= OPC_MULT
;
13566 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13567 mips32_op
= OPC_MULTU
;
13570 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13571 mips32_op
= OPC_DIV
;
13574 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13575 mips32_op
= OPC_DIVU
;
13578 check_insn(ctx
, ISA_MIPS32
);
13579 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13582 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13583 mips32_op
= OPC_MADD
;
13586 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13587 mips32_op
= OPC_MADDU
;
13590 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13591 mips32_op
= OPC_MSUB
;
13594 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13595 mips32_op
= OPC_MSUBU
;
13597 check_insn(ctx
, ISA_MIPS32
);
13598 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13601 goto pool32axf_invalid
;
13612 generate_exception_err(ctx
, EXCP_CpU
, 2);
13615 goto pool32axf_invalid
;
13620 case JALR
: /* JALRC */
13621 case JALR_HB
: /* JALRC_HB */
13622 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13623 /* JALRC, JALRC_HB */
13624 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13626 /* JALR, JALR_HB */
13627 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13628 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13633 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13634 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13635 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13638 goto pool32axf_invalid
;
13644 check_cp0_enabled(ctx
);
13645 check_insn(ctx
, ISA_MIPS32R2
);
13646 gen_load_srsgpr(rs
, rt
);
13649 check_cp0_enabled(ctx
);
13650 check_insn(ctx
, ISA_MIPS32R2
);
13651 gen_store_srsgpr(rs
, rt
);
13654 goto pool32axf_invalid
;
13657 #ifndef CONFIG_USER_ONLY
13661 mips32_op
= OPC_TLBP
;
13664 mips32_op
= OPC_TLBR
;
13667 mips32_op
= OPC_TLBWI
;
13670 mips32_op
= OPC_TLBWR
;
13673 mips32_op
= OPC_TLBINV
;
13676 mips32_op
= OPC_TLBINVF
;
13679 mips32_op
= OPC_WAIT
;
13682 mips32_op
= OPC_DERET
;
13685 mips32_op
= OPC_ERET
;
13687 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13690 goto pool32axf_invalid
;
13696 check_cp0_enabled(ctx
);
13698 TCGv t0
= tcg_temp_new();
13700 save_cpu_state(ctx
, 1);
13701 gen_helper_di(t0
, cpu_env
);
13702 gen_store_gpr(t0
, rs
);
13703 /* Stop translation as we may have switched the execution mode */
13704 ctx
->base
.is_jmp
= DISAS_STOP
;
13709 check_cp0_enabled(ctx
);
13711 TCGv t0
= tcg_temp_new();
13713 save_cpu_state(ctx
, 1);
13714 gen_helper_ei(t0
, cpu_env
);
13715 gen_store_gpr(t0
, rs
);
13716 /* DISAS_STOP isn't sufficient, we need to ensure we break out
13717 of translated code to check for pending interrupts. */
13718 gen_save_pc(ctx
->base
.pc_next
+ 4);
13719 ctx
->base
.is_jmp
= DISAS_EXIT
;
13724 goto pool32axf_invalid
;
13731 gen_sync(extract32(ctx
->opcode
, 16, 5));
13734 generate_exception_end(ctx
, EXCP_SYSCALL
);
13737 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13738 gen_helper_do_semihosting(cpu_env
);
13740 check_insn(ctx
, ISA_MIPS32
);
13741 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13742 generate_exception_end(ctx
, EXCP_RI
);
13744 generate_exception_end(ctx
, EXCP_DBp
);
13749 goto pool32axf_invalid
;
13753 switch (minor
& 3) {
13755 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13758 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13761 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13764 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13767 goto pool32axf_invalid
;
13771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13774 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13777 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13780 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13783 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13786 goto pool32axf_invalid
;
13791 MIPS_INVAL("pool32axf");
13792 generate_exception_end(ctx
, EXCP_RI
);
13797 /* Values for microMIPS fmt field. Variable-width, depending on which
13798 formats the instruction supports. */
13817 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13819 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13820 uint32_t mips32_op
;
13822 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13823 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13824 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13826 switch (extension
) {
13827 case FLOAT_1BIT_FMT(CFC1
, 0):
13828 mips32_op
= OPC_CFC1
;
13830 case FLOAT_1BIT_FMT(CTC1
, 0):
13831 mips32_op
= OPC_CTC1
;
13833 case FLOAT_1BIT_FMT(MFC1
, 0):
13834 mips32_op
= OPC_MFC1
;
13836 case FLOAT_1BIT_FMT(MTC1
, 0):
13837 mips32_op
= OPC_MTC1
;
13839 case FLOAT_1BIT_FMT(MFHC1
, 0):
13840 mips32_op
= OPC_MFHC1
;
13842 case FLOAT_1BIT_FMT(MTHC1
, 0):
13843 mips32_op
= OPC_MTHC1
;
13845 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13848 /* Reciprocal square root */
13849 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13850 mips32_op
= OPC_RSQRT_S
;
13852 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13853 mips32_op
= OPC_RSQRT_D
;
13857 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13858 mips32_op
= OPC_SQRT_S
;
13860 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13861 mips32_op
= OPC_SQRT_D
;
13865 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13866 mips32_op
= OPC_RECIP_S
;
13868 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13869 mips32_op
= OPC_RECIP_D
;
13873 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13874 mips32_op
= OPC_FLOOR_L_S
;
13876 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13877 mips32_op
= OPC_FLOOR_L_D
;
13879 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13880 mips32_op
= OPC_FLOOR_W_S
;
13882 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13883 mips32_op
= OPC_FLOOR_W_D
;
13887 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13888 mips32_op
= OPC_CEIL_L_S
;
13890 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13891 mips32_op
= OPC_CEIL_L_D
;
13893 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13894 mips32_op
= OPC_CEIL_W_S
;
13896 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13897 mips32_op
= OPC_CEIL_W_D
;
13901 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13902 mips32_op
= OPC_TRUNC_L_S
;
13904 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13905 mips32_op
= OPC_TRUNC_L_D
;
13907 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13908 mips32_op
= OPC_TRUNC_W_S
;
13910 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13911 mips32_op
= OPC_TRUNC_W_D
;
13915 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13916 mips32_op
= OPC_ROUND_L_S
;
13918 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13919 mips32_op
= OPC_ROUND_L_D
;
13921 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13922 mips32_op
= OPC_ROUND_W_S
;
13924 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13925 mips32_op
= OPC_ROUND_W_D
;
13928 /* Integer to floating-point conversion */
13929 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13930 mips32_op
= OPC_CVT_L_S
;
13932 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13933 mips32_op
= OPC_CVT_L_D
;
13935 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13936 mips32_op
= OPC_CVT_W_S
;
13938 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13939 mips32_op
= OPC_CVT_W_D
;
13942 /* Paired-foo conversions */
13943 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13944 mips32_op
= OPC_CVT_S_PL
;
13946 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13947 mips32_op
= OPC_CVT_S_PU
;
13949 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13950 mips32_op
= OPC_CVT_PW_PS
;
13952 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13953 mips32_op
= OPC_CVT_PS_PW
;
13956 /* Floating-point moves */
13957 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13958 mips32_op
= OPC_MOV_S
;
13960 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13961 mips32_op
= OPC_MOV_D
;
13963 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13964 mips32_op
= OPC_MOV_PS
;
13967 /* Absolute value */
13968 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13969 mips32_op
= OPC_ABS_S
;
13971 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13972 mips32_op
= OPC_ABS_D
;
13974 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13975 mips32_op
= OPC_ABS_PS
;
13979 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13980 mips32_op
= OPC_NEG_S
;
13982 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13983 mips32_op
= OPC_NEG_D
;
13985 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13986 mips32_op
= OPC_NEG_PS
;
13989 /* Reciprocal square root step */
13990 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13991 mips32_op
= OPC_RSQRT1_S
;
13993 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13994 mips32_op
= OPC_RSQRT1_D
;
13996 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13997 mips32_op
= OPC_RSQRT1_PS
;
14000 /* Reciprocal step */
14001 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
14002 mips32_op
= OPC_RECIP1_S
;
14004 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
14005 mips32_op
= OPC_RECIP1_S
;
14007 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
14008 mips32_op
= OPC_RECIP1_PS
;
14011 /* Conversions from double */
14012 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
14013 mips32_op
= OPC_CVT_D_S
;
14015 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
14016 mips32_op
= OPC_CVT_D_W
;
14018 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
14019 mips32_op
= OPC_CVT_D_L
;
14022 /* Conversions from single */
14023 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
14024 mips32_op
= OPC_CVT_S_D
;
14026 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
14027 mips32_op
= OPC_CVT_S_W
;
14029 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
14030 mips32_op
= OPC_CVT_S_L
;
14032 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
14035 /* Conditional moves on floating-point codes */
14036 case COND_FLOAT_MOV(MOVT
, 0):
14037 case COND_FLOAT_MOV(MOVT
, 1):
14038 case COND_FLOAT_MOV(MOVT
, 2):
14039 case COND_FLOAT_MOV(MOVT
, 3):
14040 case COND_FLOAT_MOV(MOVT
, 4):
14041 case COND_FLOAT_MOV(MOVT
, 5):
14042 case COND_FLOAT_MOV(MOVT
, 6):
14043 case COND_FLOAT_MOV(MOVT
, 7):
14044 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14045 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
14047 case COND_FLOAT_MOV(MOVF
, 0):
14048 case COND_FLOAT_MOV(MOVF
, 1):
14049 case COND_FLOAT_MOV(MOVF
, 2):
14050 case COND_FLOAT_MOV(MOVF
, 3):
14051 case COND_FLOAT_MOV(MOVF
, 4):
14052 case COND_FLOAT_MOV(MOVF
, 5):
14053 case COND_FLOAT_MOV(MOVF
, 6):
14054 case COND_FLOAT_MOV(MOVF
, 7):
14055 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14056 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
14059 MIPS_INVAL("pool32fxf");
14060 generate_exception_end(ctx
, EXCP_RI
);
14065 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
14069 int rt
, rs
, rd
, rr
;
14071 uint32_t op
, minor
, minor2
, mips32_op
;
14072 uint32_t cond
, fmt
, cc
;
14074 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
14075 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
14077 rt
= (ctx
->opcode
>> 21) & 0x1f;
14078 rs
= (ctx
->opcode
>> 16) & 0x1f;
14079 rd
= (ctx
->opcode
>> 11) & 0x1f;
14080 rr
= (ctx
->opcode
>> 6) & 0x1f;
14081 imm
= (int16_t) ctx
->opcode
;
14083 op
= (ctx
->opcode
>> 26) & 0x3f;
14086 minor
= ctx
->opcode
& 0x3f;
14089 minor
= (ctx
->opcode
>> 6) & 0xf;
14092 mips32_op
= OPC_SLL
;
14095 mips32_op
= OPC_SRA
;
14098 mips32_op
= OPC_SRL
;
14101 mips32_op
= OPC_ROTR
;
14103 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
14106 check_insn(ctx
, ISA_MIPS32R6
);
14107 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
14110 check_insn(ctx
, ISA_MIPS32R6
);
14111 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
14114 check_insn(ctx
, ISA_MIPS32R6
);
14115 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
14118 goto pool32a_invalid
;
14122 minor
= (ctx
->opcode
>> 6) & 0xf;
14126 mips32_op
= OPC_ADD
;
14129 mips32_op
= OPC_ADDU
;
14132 mips32_op
= OPC_SUB
;
14135 mips32_op
= OPC_SUBU
;
14138 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14139 mips32_op
= OPC_MUL
;
14141 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14145 mips32_op
= OPC_SLLV
;
14148 mips32_op
= OPC_SRLV
;
14151 mips32_op
= OPC_SRAV
;
14154 mips32_op
= OPC_ROTRV
;
14156 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14158 /* Logical operations */
14160 mips32_op
= OPC_AND
;
14163 mips32_op
= OPC_OR
;
14166 mips32_op
= OPC_NOR
;
14169 mips32_op
= OPC_XOR
;
14171 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14173 /* Set less than */
14175 mips32_op
= OPC_SLT
;
14178 mips32_op
= OPC_SLTU
;
14180 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14183 goto pool32a_invalid
;
14187 minor
= (ctx
->opcode
>> 6) & 0xf;
14189 /* Conditional moves */
14190 case MOVN
: /* MUL */
14191 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14193 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14196 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14199 case MOVZ
: /* MUH */
14200 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14202 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14205 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14209 check_insn(ctx
, ISA_MIPS32R6
);
14210 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14213 check_insn(ctx
, ISA_MIPS32R6
);
14214 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14216 case LWXS
: /* DIV */
14217 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14219 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14222 gen_ldxs(ctx
, rs
, rt
, rd
);
14226 check_insn(ctx
, ISA_MIPS32R6
);
14227 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14230 check_insn(ctx
, ISA_MIPS32R6
);
14231 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14234 check_insn(ctx
, ISA_MIPS32R6
);
14235 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14238 goto pool32a_invalid
;
14242 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14245 check_insn(ctx
, ISA_MIPS32R6
);
14246 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14247 extract32(ctx
->opcode
, 9, 2));
14250 check_insn(ctx
, ISA_MIPS32R6
);
14251 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14252 extract32(ctx
->opcode
, 9, 2));
14255 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14258 gen_pool32axf(env
, ctx
, rt
, rs
);
14261 generate_exception_end(ctx
, EXCP_BREAK
);
14264 check_insn(ctx
, ISA_MIPS32R6
);
14265 generate_exception_end(ctx
, EXCP_RI
);
14269 MIPS_INVAL("pool32a");
14270 generate_exception_end(ctx
, EXCP_RI
);
14275 minor
= (ctx
->opcode
>> 12) & 0xf;
14278 check_cp0_enabled(ctx
);
14279 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14280 gen_cache_operation(ctx
, rt
, rs
, imm
);
14285 /* COP2: Not implemented. */
14286 generate_exception_err(ctx
, EXCP_CpU
, 2);
14288 #ifdef TARGET_MIPS64
14291 check_insn(ctx
, ISA_MIPS3
);
14292 check_mips_64(ctx
);
14297 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14299 #ifdef TARGET_MIPS64
14302 check_insn(ctx
, ISA_MIPS3
);
14303 check_mips_64(ctx
);
14308 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14311 MIPS_INVAL("pool32b");
14312 generate_exception_end(ctx
, EXCP_RI
);
14317 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14318 minor
= ctx
->opcode
& 0x3f;
14319 check_cp1_enabled(ctx
);
14322 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14323 mips32_op
= OPC_ALNV_PS
;
14326 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14327 mips32_op
= OPC_MADD_S
;
14330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14331 mips32_op
= OPC_MADD_D
;
14334 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14335 mips32_op
= OPC_MADD_PS
;
14338 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14339 mips32_op
= OPC_MSUB_S
;
14342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14343 mips32_op
= OPC_MSUB_D
;
14346 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14347 mips32_op
= OPC_MSUB_PS
;
14350 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14351 mips32_op
= OPC_NMADD_S
;
14354 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14355 mips32_op
= OPC_NMADD_D
;
14358 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14359 mips32_op
= OPC_NMADD_PS
;
14362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14363 mips32_op
= OPC_NMSUB_S
;
14366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14367 mips32_op
= OPC_NMSUB_D
;
14370 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14371 mips32_op
= OPC_NMSUB_PS
;
14373 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14375 case CABS_COND_FMT
:
14376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14377 cond
= (ctx
->opcode
>> 6) & 0xf;
14378 cc
= (ctx
->opcode
>> 13) & 0x7;
14379 fmt
= (ctx
->opcode
>> 10) & 0x3;
14382 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14385 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14388 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14391 goto pool32f_invalid
;
14395 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14396 cond
= (ctx
->opcode
>> 6) & 0xf;
14397 cc
= (ctx
->opcode
>> 13) & 0x7;
14398 fmt
= (ctx
->opcode
>> 10) & 0x3;
14401 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14404 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14407 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14410 goto pool32f_invalid
;
14414 check_insn(ctx
, ISA_MIPS32R6
);
14415 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14418 check_insn(ctx
, ISA_MIPS32R6
);
14419 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14422 gen_pool32fxf(ctx
, rt
, rs
);
14426 switch ((ctx
->opcode
>> 6) & 0x7) {
14428 mips32_op
= OPC_PLL_PS
;
14431 mips32_op
= OPC_PLU_PS
;
14434 mips32_op
= OPC_PUL_PS
;
14437 mips32_op
= OPC_PUU_PS
;
14440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14441 mips32_op
= OPC_CVT_PS_S
;
14443 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14446 goto pool32f_invalid
;
14450 check_insn(ctx
, ISA_MIPS32R6
);
14451 switch ((ctx
->opcode
>> 9) & 0x3) {
14453 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14456 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14459 goto pool32f_invalid
;
14464 switch ((ctx
->opcode
>> 6) & 0x7) {
14466 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14467 mips32_op
= OPC_LWXC1
;
14470 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14471 mips32_op
= OPC_SWXC1
;
14474 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14475 mips32_op
= OPC_LDXC1
;
14478 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14479 mips32_op
= OPC_SDXC1
;
14482 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14483 mips32_op
= OPC_LUXC1
;
14486 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14487 mips32_op
= OPC_SUXC1
;
14489 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14492 goto pool32f_invalid
;
14496 check_insn(ctx
, ISA_MIPS32R6
);
14497 switch ((ctx
->opcode
>> 9) & 0x3) {
14499 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14502 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14505 goto pool32f_invalid
;
14510 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14511 fmt
= (ctx
->opcode
>> 9) & 0x3;
14512 switch ((ctx
->opcode
>> 6) & 0x7) {
14516 mips32_op
= OPC_RSQRT2_S
;
14519 mips32_op
= OPC_RSQRT2_D
;
14522 mips32_op
= OPC_RSQRT2_PS
;
14525 goto pool32f_invalid
;
14531 mips32_op
= OPC_RECIP2_S
;
14534 mips32_op
= OPC_RECIP2_D
;
14537 mips32_op
= OPC_RECIP2_PS
;
14540 goto pool32f_invalid
;
14544 mips32_op
= OPC_ADDR_PS
;
14547 mips32_op
= OPC_MULR_PS
;
14549 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14552 goto pool32f_invalid
;
14556 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14557 cc
= (ctx
->opcode
>> 13) & 0x7;
14558 fmt
= (ctx
->opcode
>> 9) & 0x3;
14559 switch ((ctx
->opcode
>> 6) & 0x7) {
14560 case MOVF_FMT
: /* RINT_FMT */
14561 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14565 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14568 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14571 goto pool32f_invalid
;
14577 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14580 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14584 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14587 goto pool32f_invalid
;
14591 case MOVT_FMT
: /* CLASS_FMT */
14592 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14596 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14599 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14602 goto pool32f_invalid
;
14608 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14611 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14615 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14618 goto pool32f_invalid
;
14623 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14626 goto pool32f_invalid
;
14629 #define FINSN_3ARG_SDPS(prfx) \
14630 switch ((ctx->opcode >> 8) & 0x3) { \
14632 mips32_op = OPC_##prfx##_S; \
14635 mips32_op = OPC_##prfx##_D; \
14637 case FMT_SDPS_PS: \
14639 mips32_op = OPC_##prfx##_PS; \
14642 goto pool32f_invalid; \
14645 check_insn(ctx
, ISA_MIPS32R6
);
14646 switch ((ctx
->opcode
>> 9) & 0x3) {
14648 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14651 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14654 goto pool32f_invalid
;
14658 check_insn(ctx
, ISA_MIPS32R6
);
14659 switch ((ctx
->opcode
>> 9) & 0x3) {
14661 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14664 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14667 goto pool32f_invalid
;
14671 /* regular FP ops */
14672 switch ((ctx
->opcode
>> 6) & 0x3) {
14674 FINSN_3ARG_SDPS(ADD
);
14677 FINSN_3ARG_SDPS(SUB
);
14680 FINSN_3ARG_SDPS(MUL
);
14683 fmt
= (ctx
->opcode
>> 8) & 0x3;
14685 mips32_op
= OPC_DIV_D
;
14686 } else if (fmt
== 0) {
14687 mips32_op
= OPC_DIV_S
;
14689 goto pool32f_invalid
;
14693 goto pool32f_invalid
;
14698 switch ((ctx
->opcode
>> 6) & 0x7) {
14699 case MOVN_FMT
: /* SELNEZ_FMT */
14700 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14702 switch ((ctx
->opcode
>> 9) & 0x3) {
14704 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14707 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14710 goto pool32f_invalid
;
14714 FINSN_3ARG_SDPS(MOVN
);
14718 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14719 FINSN_3ARG_SDPS(MOVN
);
14721 case MOVZ_FMT
: /* SELEQZ_FMT */
14722 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14724 switch ((ctx
->opcode
>> 9) & 0x3) {
14726 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14729 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14732 goto pool32f_invalid
;
14736 FINSN_3ARG_SDPS(MOVZ
);
14740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14741 FINSN_3ARG_SDPS(MOVZ
);
14744 check_insn(ctx
, ISA_MIPS32R6
);
14745 switch ((ctx
->opcode
>> 9) & 0x3) {
14747 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14750 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14753 goto pool32f_invalid
;
14757 check_insn(ctx
, ISA_MIPS32R6
);
14758 switch ((ctx
->opcode
>> 9) & 0x3) {
14760 mips32_op
= OPC_MADDF_S
;
14763 mips32_op
= OPC_MADDF_D
;
14766 goto pool32f_invalid
;
14770 check_insn(ctx
, ISA_MIPS32R6
);
14771 switch ((ctx
->opcode
>> 9) & 0x3) {
14773 mips32_op
= OPC_MSUBF_S
;
14776 mips32_op
= OPC_MSUBF_D
;
14779 goto pool32f_invalid
;
14783 goto pool32f_invalid
;
14787 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14791 MIPS_INVAL("pool32f");
14792 generate_exception_end(ctx
, EXCP_RI
);
14796 generate_exception_err(ctx
, EXCP_CpU
, 1);
14800 minor
= (ctx
->opcode
>> 21) & 0x1f;
14803 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14804 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14807 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14808 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14809 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14812 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14813 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14814 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14817 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14818 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14821 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14822 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14823 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14826 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14827 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14828 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14832 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14836 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14840 case TLTI
: /* BC1EQZC */
14841 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14843 check_cp1_enabled(ctx
);
14844 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14847 mips32_op
= OPC_TLTI
;
14851 case TGEI
: /* BC1NEZC */
14852 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14854 check_cp1_enabled(ctx
);
14855 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14858 mips32_op
= OPC_TGEI
;
14863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14864 mips32_op
= OPC_TLTIU
;
14867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14868 mips32_op
= OPC_TGEIU
;
14870 case TNEI
: /* SYNCI */
14871 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14873 /* Break the TB to be able to sync copied instructions
14875 ctx
->base
.is_jmp
= DISAS_STOP
;
14878 mips32_op
= OPC_TNEI
;
14883 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14884 mips32_op
= OPC_TEQI
;
14886 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14891 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14892 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14893 4, rs
, 0, imm
<< 1, 0);
14894 /* Compact branches don't have a delay slot, so just let
14895 the normal delay slot handling take us to the branch
14899 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14900 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14903 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14904 /* Break the TB to be able to sync copied instructions
14906 ctx
->base
.is_jmp
= DISAS_STOP
;
14910 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14911 /* COP2: Not implemented. */
14912 generate_exception_err(ctx
, EXCP_CpU
, 2);
14915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14916 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14919 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14920 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14923 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14924 mips32_op
= OPC_BC1FANY4
;
14927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14928 mips32_op
= OPC_BC1TANY4
;
14931 check_insn(ctx
, ASE_MIPS3D
);
14934 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14935 check_cp1_enabled(ctx
);
14936 gen_compute_branch1(ctx
, mips32_op
,
14937 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14939 generate_exception_err(ctx
, EXCP_CpU
, 1);
14944 /* MIPS DSP: not implemented */
14947 MIPS_INVAL("pool32i");
14948 generate_exception_end(ctx
, EXCP_RI
);
14953 minor
= (ctx
->opcode
>> 12) & 0xf;
14954 offset
= sextract32(ctx
->opcode
, 0,
14955 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14958 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14959 mips32_op
= OPC_LWL
;
14962 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14963 mips32_op
= OPC_SWL
;
14966 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14967 mips32_op
= OPC_LWR
;
14970 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14971 mips32_op
= OPC_SWR
;
14973 #if defined(TARGET_MIPS64)
14975 check_insn(ctx
, ISA_MIPS3
);
14976 check_mips_64(ctx
);
14977 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14978 mips32_op
= OPC_LDL
;
14981 check_insn(ctx
, ISA_MIPS3
);
14982 check_mips_64(ctx
);
14983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14984 mips32_op
= OPC_SDL
;
14987 check_insn(ctx
, ISA_MIPS3
);
14988 check_mips_64(ctx
);
14989 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14990 mips32_op
= OPC_LDR
;
14993 check_insn(ctx
, ISA_MIPS3
);
14994 check_mips_64(ctx
);
14995 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14996 mips32_op
= OPC_SDR
;
14999 check_insn(ctx
, ISA_MIPS3
);
15000 check_mips_64(ctx
);
15001 mips32_op
= OPC_LWU
;
15004 check_insn(ctx
, ISA_MIPS3
);
15005 check_mips_64(ctx
);
15006 mips32_op
= OPC_LLD
;
15010 mips32_op
= OPC_LL
;
15013 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
15016 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
15019 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
15021 #if defined(TARGET_MIPS64)
15023 check_insn(ctx
, ISA_MIPS3
);
15024 check_mips_64(ctx
);
15025 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
15030 MIPS_INVAL("pool32c ld-eva");
15031 generate_exception_end(ctx
, EXCP_RI
);
15034 check_cp0_enabled(ctx
);
15036 minor2
= (ctx
->opcode
>> 9) & 0x7;
15037 offset
= sextract32(ctx
->opcode
, 0, 9);
15040 mips32_op
= OPC_LBUE
;
15043 mips32_op
= OPC_LHUE
;
15046 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15047 mips32_op
= OPC_LWLE
;
15050 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15051 mips32_op
= OPC_LWRE
;
15054 mips32_op
= OPC_LBE
;
15057 mips32_op
= OPC_LHE
;
15060 mips32_op
= OPC_LLE
;
15063 mips32_op
= OPC_LWE
;
15069 MIPS_INVAL("pool32c st-eva");
15070 generate_exception_end(ctx
, EXCP_RI
);
15073 check_cp0_enabled(ctx
);
15075 minor2
= (ctx
->opcode
>> 9) & 0x7;
15076 offset
= sextract32(ctx
->opcode
, 0, 9);
15079 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15080 mips32_op
= OPC_SWLE
;
15083 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15084 mips32_op
= OPC_SWRE
;
15087 /* Treat as no-op */
15088 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15089 /* hint codes 24-31 are reserved and signal RI */
15090 generate_exception(ctx
, EXCP_RI
);
15094 /* Treat as no-op */
15095 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15096 gen_cache_operation(ctx
, rt
, rs
, offset
);
15100 mips32_op
= OPC_SBE
;
15103 mips32_op
= OPC_SHE
;
15106 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
15109 mips32_op
= OPC_SWE
;
15114 /* Treat as no-op */
15115 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15116 /* hint codes 24-31 are reserved and signal RI */
15117 generate_exception(ctx
, EXCP_RI
);
15121 MIPS_INVAL("pool32c");
15122 generate_exception_end(ctx
, EXCP_RI
);
15126 case ADDI32
: /* AUI, LUI */
15127 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15129 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15132 mips32_op
= OPC_ADDI
;
15137 mips32_op
= OPC_ADDIU
;
15139 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15142 /* Logical operations */
15144 mips32_op
= OPC_ORI
;
15147 mips32_op
= OPC_XORI
;
15150 mips32_op
= OPC_ANDI
;
15152 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15155 /* Set less than immediate */
15157 mips32_op
= OPC_SLTI
;
15160 mips32_op
= OPC_SLTIU
;
15162 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15165 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15166 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15167 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15168 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15170 case JALS32
: /* BOVC, BEQC, BEQZALC */
15171 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15174 mips32_op
= OPC_BOVC
;
15175 } else if (rs
< rt
&& rs
== 0) {
15177 mips32_op
= OPC_BEQZALC
;
15180 mips32_op
= OPC_BEQC
;
15182 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15185 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15186 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15187 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15190 case BEQ32
: /* BC */
15191 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15193 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15194 sextract32(ctx
->opcode
<< 1, 0, 27));
15197 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15200 case BNE32
: /* BALC */
15201 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15203 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15204 sextract32(ctx
->opcode
<< 1, 0, 27));
15207 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15210 case J32
: /* BGTZC, BLTZC, BLTC */
15211 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15212 if (rs
== 0 && rt
!= 0) {
15214 mips32_op
= OPC_BGTZC
;
15215 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15217 mips32_op
= OPC_BLTZC
;
15220 mips32_op
= OPC_BLTC
;
15222 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15225 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15226 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15229 case JAL32
: /* BLEZC, BGEZC, BGEC */
15230 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15231 if (rs
== 0 && rt
!= 0) {
15233 mips32_op
= OPC_BLEZC
;
15234 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15236 mips32_op
= OPC_BGEZC
;
15239 mips32_op
= OPC_BGEC
;
15241 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15244 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15245 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15246 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15249 /* Floating point (COP1) */
15251 mips32_op
= OPC_LWC1
;
15254 mips32_op
= OPC_LDC1
;
15257 mips32_op
= OPC_SWC1
;
15260 mips32_op
= OPC_SDC1
;
15262 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15264 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15265 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15266 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15267 switch ((ctx
->opcode
>> 16) & 0x1f) {
15276 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15279 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
15282 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
15292 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15295 generate_exception(ctx
, EXCP_RI
);
15300 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15301 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15303 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15306 case BNVC
: /* BNEC, BNEZALC */
15307 check_insn(ctx
, ISA_MIPS32R6
);
15310 mips32_op
= OPC_BNVC
;
15311 } else if (rs
< rt
&& rs
== 0) {
15313 mips32_op
= OPC_BNEZALC
;
15316 mips32_op
= OPC_BNEC
;
15318 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15320 case R6_BNEZC
: /* JIALC */
15321 check_insn(ctx
, ISA_MIPS32R6
);
15324 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15325 sextract32(ctx
->opcode
<< 1, 0, 22));
15328 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15331 case R6_BEQZC
: /* JIC */
15332 check_insn(ctx
, ISA_MIPS32R6
);
15335 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15336 sextract32(ctx
->opcode
<< 1, 0, 22));
15339 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15342 case BLEZALC
: /* BGEZALC, BGEUC */
15343 check_insn(ctx
, ISA_MIPS32R6
);
15344 if (rs
== 0 && rt
!= 0) {
15346 mips32_op
= OPC_BLEZALC
;
15347 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15349 mips32_op
= OPC_BGEZALC
;
15352 mips32_op
= OPC_BGEUC
;
15354 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15356 case BGTZALC
: /* BLTZALC, BLTUC */
15357 check_insn(ctx
, ISA_MIPS32R6
);
15358 if (rs
== 0 && rt
!= 0) {
15360 mips32_op
= OPC_BGTZALC
;
15361 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15363 mips32_op
= OPC_BLTZALC
;
15366 mips32_op
= OPC_BLTUC
;
15368 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15370 /* Loads and stores */
15372 mips32_op
= OPC_LB
;
15375 mips32_op
= OPC_LBU
;
15378 mips32_op
= OPC_LH
;
15381 mips32_op
= OPC_LHU
;
15384 mips32_op
= OPC_LW
;
15386 #ifdef TARGET_MIPS64
15388 check_insn(ctx
, ISA_MIPS3
);
15389 check_mips_64(ctx
);
15390 mips32_op
= OPC_LD
;
15393 check_insn(ctx
, ISA_MIPS3
);
15394 check_mips_64(ctx
);
15395 mips32_op
= OPC_SD
;
15399 mips32_op
= OPC_SB
;
15402 mips32_op
= OPC_SH
;
15405 mips32_op
= OPC_SW
;
15408 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15411 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15414 generate_exception_end(ctx
, EXCP_RI
);
15419 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15423 /* make sure instructions are on a halfword boundary */
15424 if (ctx
->base
.pc_next
& 0x1) {
15425 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15426 generate_exception_end(ctx
, EXCP_AdEL
);
15430 op
= (ctx
->opcode
>> 10) & 0x3f;
15431 /* Enforce properly-sized instructions in a delay slot */
15432 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15433 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15435 /* POOL32A, POOL32B, POOL32I, POOL32C */
15437 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15439 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15441 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15443 /* LB32, LH32, LWC132, LDC132, LW32 */
15444 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15445 generate_exception_end(ctx
, EXCP_RI
);
15450 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15452 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15454 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15455 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15456 generate_exception_end(ctx
, EXCP_RI
);
15466 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15467 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15468 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15471 switch (ctx
->opcode
& 0x1) {
15479 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15480 /* In the Release 6 the register number location in
15481 * the instruction encoding has changed.
15483 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15485 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15491 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15492 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15493 int amount
= (ctx
->opcode
>> 1) & 0x7;
15495 amount
= amount
== 0 ? 8 : amount
;
15497 switch (ctx
->opcode
& 0x1) {
15506 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15510 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15511 gen_pool16c_r6_insn(ctx
);
15513 gen_pool16c_insn(ctx
);
15518 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15519 int rb
= 28; /* GP */
15520 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15522 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15527 if (ctx
->opcode
& 1) {
15528 generate_exception_end(ctx
, EXCP_RI
);
15531 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15532 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15533 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15534 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15539 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15540 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15541 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15542 offset
= (offset
== 0xf ? -1 : offset
);
15544 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15549 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15550 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15551 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15553 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15558 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15559 int rb
= 29; /* SP */
15560 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15562 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15567 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15568 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15569 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15571 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15576 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15577 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15578 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15580 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15585 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15586 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15587 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15589 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15594 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15595 int rb
= 29; /* SP */
15596 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15598 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15603 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15604 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15605 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15607 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15612 int rd
= uMIPS_RD5(ctx
->opcode
);
15613 int rs
= uMIPS_RS5(ctx
->opcode
);
15615 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15622 switch (ctx
->opcode
& 0x1) {
15632 switch (ctx
->opcode
& 0x1) {
15637 gen_addiur1sp(ctx
);
15641 case B16
: /* BC16 */
15642 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15643 sextract32(ctx
->opcode
, 0, 10) << 1,
15644 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15646 case BNEZ16
: /* BNEZC16 */
15647 case BEQZ16
: /* BEQZC16 */
15648 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15649 mmreg(uMIPS_RD(ctx
->opcode
)),
15650 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15651 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15656 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15657 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15659 imm
= (imm
== 0x7f ? -1 : imm
);
15660 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15666 generate_exception_end(ctx
, EXCP_RI
);
15669 decode_micromips32_opc(env
, ctx
);
15676 /* SmartMIPS extension to MIPS32 */
15678 #if defined(TARGET_MIPS64)
15680 /* MDMX extension to MIPS64 */
15684 /* MIPSDSP functions. */
15685 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15686 int rd
, int base
, int offset
)
15691 t0
= tcg_temp_new();
15694 gen_load_gpr(t0
, offset
);
15695 } else if (offset
== 0) {
15696 gen_load_gpr(t0
, base
);
15698 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15703 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15704 gen_store_gpr(t0
, rd
);
15707 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15708 gen_store_gpr(t0
, rd
);
15711 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15712 gen_store_gpr(t0
, rd
);
15714 #if defined(TARGET_MIPS64)
15716 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15717 gen_store_gpr(t0
, rd
);
15724 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15725 int ret
, int v1
, int v2
)
15731 /* Treat as NOP. */
15735 v1_t
= tcg_temp_new();
15736 v2_t
= tcg_temp_new();
15738 gen_load_gpr(v1_t
, v1
);
15739 gen_load_gpr(v2_t
, v2
);
15742 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15743 case OPC_MULT_G_2E
:
15747 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15749 case OPC_ADDUH_R_QB
:
15750 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15753 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15755 case OPC_ADDQH_R_PH
:
15756 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15759 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15761 case OPC_ADDQH_R_W
:
15762 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15765 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15767 case OPC_SUBUH_R_QB
:
15768 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15771 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15773 case OPC_SUBQH_R_PH
:
15774 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15777 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15779 case OPC_SUBQH_R_W
:
15780 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15784 case OPC_ABSQ_S_PH_DSP
:
15786 case OPC_ABSQ_S_QB
:
15788 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15790 case OPC_ABSQ_S_PH
:
15792 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15796 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15798 case OPC_PRECEQ_W_PHL
:
15800 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15801 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15803 case OPC_PRECEQ_W_PHR
:
15805 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15806 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15807 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15809 case OPC_PRECEQU_PH_QBL
:
15811 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15813 case OPC_PRECEQU_PH_QBR
:
15815 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15817 case OPC_PRECEQU_PH_QBLA
:
15819 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15821 case OPC_PRECEQU_PH_QBRA
:
15823 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15825 case OPC_PRECEU_PH_QBL
:
15827 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15829 case OPC_PRECEU_PH_QBR
:
15831 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15833 case OPC_PRECEU_PH_QBLA
:
15835 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15837 case OPC_PRECEU_PH_QBRA
:
15839 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15843 case OPC_ADDU_QB_DSP
:
15847 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15849 case OPC_ADDQ_S_PH
:
15851 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15855 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15859 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15861 case OPC_ADDU_S_QB
:
15863 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15867 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15869 case OPC_ADDU_S_PH
:
15871 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15875 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15877 case OPC_SUBQ_S_PH
:
15879 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15883 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15887 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15889 case OPC_SUBU_S_QB
:
15891 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15895 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15897 case OPC_SUBU_S_PH
:
15899 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15903 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15907 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15911 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15913 case OPC_RADDU_W_QB
:
15915 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15919 case OPC_CMPU_EQ_QB_DSP
:
15921 case OPC_PRECR_QB_PH
:
15923 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15925 case OPC_PRECRQ_QB_PH
:
15927 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15929 case OPC_PRECR_SRA_PH_W
:
15932 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15933 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15935 tcg_temp_free_i32(sa_t
);
15938 case OPC_PRECR_SRA_R_PH_W
:
15941 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15942 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15944 tcg_temp_free_i32(sa_t
);
15947 case OPC_PRECRQ_PH_W
:
15949 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15951 case OPC_PRECRQ_RS_PH_W
:
15953 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15955 case OPC_PRECRQU_S_QB_PH
:
15957 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15961 #ifdef TARGET_MIPS64
15962 case OPC_ABSQ_S_QH_DSP
:
15964 case OPC_PRECEQ_L_PWL
:
15966 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15968 case OPC_PRECEQ_L_PWR
:
15970 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15972 case OPC_PRECEQ_PW_QHL
:
15974 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15976 case OPC_PRECEQ_PW_QHR
:
15978 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15980 case OPC_PRECEQ_PW_QHLA
:
15982 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15984 case OPC_PRECEQ_PW_QHRA
:
15986 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15988 case OPC_PRECEQU_QH_OBL
:
15990 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15992 case OPC_PRECEQU_QH_OBR
:
15994 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15996 case OPC_PRECEQU_QH_OBLA
:
15998 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
16000 case OPC_PRECEQU_QH_OBRA
:
16002 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
16004 case OPC_PRECEU_QH_OBL
:
16006 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
16008 case OPC_PRECEU_QH_OBR
:
16010 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
16012 case OPC_PRECEU_QH_OBLA
:
16014 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
16016 case OPC_PRECEU_QH_OBRA
:
16018 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
16020 case OPC_ABSQ_S_OB
:
16022 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
16024 case OPC_ABSQ_S_PW
:
16026 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
16028 case OPC_ABSQ_S_QH
:
16030 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
16034 case OPC_ADDU_OB_DSP
:
16036 case OPC_RADDU_L_OB
:
16038 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
16042 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16044 case OPC_SUBQ_S_PW
:
16046 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16050 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16052 case OPC_SUBQ_S_QH
:
16054 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16058 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16060 case OPC_SUBU_S_OB
:
16062 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16066 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16068 case OPC_SUBU_S_QH
:
16070 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16074 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16076 case OPC_SUBUH_R_OB
:
16078 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16082 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16084 case OPC_ADDQ_S_PW
:
16086 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16090 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16092 case OPC_ADDQ_S_QH
:
16094 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16098 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16100 case OPC_ADDU_S_OB
:
16102 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16106 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16108 case OPC_ADDU_S_QH
:
16110 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16114 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16116 case OPC_ADDUH_R_OB
:
16118 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16122 case OPC_CMPU_EQ_OB_DSP
:
16124 case OPC_PRECR_OB_QH
:
16126 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16128 case OPC_PRECR_SRA_QH_PW
:
16131 TCGv_i32 ret_t
= tcg_const_i32(ret
);
16132 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
16133 tcg_temp_free_i32(ret_t
);
16136 case OPC_PRECR_SRA_R_QH_PW
:
16139 TCGv_i32 sa_v
= tcg_const_i32(ret
);
16140 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
16141 tcg_temp_free_i32(sa_v
);
16144 case OPC_PRECRQ_OB_QH
:
16146 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16148 case OPC_PRECRQ_PW_L
:
16150 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
16152 case OPC_PRECRQ_QH_PW
:
16154 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16156 case OPC_PRECRQ_RS_QH_PW
:
16158 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16160 case OPC_PRECRQU_S_OB_QH
:
16162 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16169 tcg_temp_free(v1_t
);
16170 tcg_temp_free(v2_t
);
16173 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
16174 int ret
, int v1
, int v2
)
16182 /* Treat as NOP. */
16186 t0
= tcg_temp_new();
16187 v1_t
= tcg_temp_new();
16188 v2_t
= tcg_temp_new();
16190 tcg_gen_movi_tl(t0
, v1
);
16191 gen_load_gpr(v1_t
, v1
);
16192 gen_load_gpr(v2_t
, v2
);
16195 case OPC_SHLL_QB_DSP
:
16197 op2
= MASK_SHLL_QB(ctx
->opcode
);
16201 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16205 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16209 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16213 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16215 case OPC_SHLL_S_PH
:
16217 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16219 case OPC_SHLLV_S_PH
:
16221 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16225 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16227 case OPC_SHLLV_S_W
:
16229 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16233 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16237 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16241 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16245 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16249 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16251 case OPC_SHRA_R_QB
:
16253 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16257 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16259 case OPC_SHRAV_R_QB
:
16261 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16265 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16267 case OPC_SHRA_R_PH
:
16269 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16273 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16275 case OPC_SHRAV_R_PH
:
16277 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16281 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16283 case OPC_SHRAV_R_W
:
16285 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16287 default: /* Invalid */
16288 MIPS_INVAL("MASK SHLL.QB");
16289 generate_exception_end(ctx
, EXCP_RI
);
16294 #ifdef TARGET_MIPS64
16295 case OPC_SHLL_OB_DSP
:
16296 op2
= MASK_SHLL_OB(ctx
->opcode
);
16300 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16304 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16306 case OPC_SHLL_S_PW
:
16308 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16310 case OPC_SHLLV_S_PW
:
16312 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16316 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16320 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16324 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16328 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16330 case OPC_SHLL_S_QH
:
16332 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16334 case OPC_SHLLV_S_QH
:
16336 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16340 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16344 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16346 case OPC_SHRA_R_OB
:
16348 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16350 case OPC_SHRAV_R_OB
:
16352 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16356 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16360 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16362 case OPC_SHRA_R_PW
:
16364 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16366 case OPC_SHRAV_R_PW
:
16368 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16372 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16376 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16378 case OPC_SHRA_R_QH
:
16380 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16382 case OPC_SHRAV_R_QH
:
16384 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16388 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16392 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16396 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16400 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16402 default: /* Invalid */
16403 MIPS_INVAL("MASK SHLL.OB");
16404 generate_exception_end(ctx
, EXCP_RI
);
16412 tcg_temp_free(v1_t
);
16413 tcg_temp_free(v2_t
);
16416 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16417 int ret
, int v1
, int v2
, int check_ret
)
16423 if ((ret
== 0) && (check_ret
== 1)) {
16424 /* Treat as NOP. */
16428 t0
= tcg_temp_new_i32();
16429 v1_t
= tcg_temp_new();
16430 v2_t
= tcg_temp_new();
16432 tcg_gen_movi_i32(t0
, ret
);
16433 gen_load_gpr(v1_t
, v1
);
16434 gen_load_gpr(v2_t
, v2
);
16437 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16438 * the same mask and op1. */
16439 case OPC_MULT_G_2E
:
16443 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16446 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16449 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16451 case OPC_MULQ_RS_W
:
16452 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16456 case OPC_DPA_W_PH_DSP
:
16458 case OPC_DPAU_H_QBL
:
16460 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16462 case OPC_DPAU_H_QBR
:
16464 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16466 case OPC_DPSU_H_QBL
:
16468 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16470 case OPC_DPSU_H_QBR
:
16472 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16476 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16478 case OPC_DPAX_W_PH
:
16480 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16482 case OPC_DPAQ_S_W_PH
:
16484 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16486 case OPC_DPAQX_S_W_PH
:
16488 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16490 case OPC_DPAQX_SA_W_PH
:
16492 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16496 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16498 case OPC_DPSX_W_PH
:
16500 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16502 case OPC_DPSQ_S_W_PH
:
16504 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16506 case OPC_DPSQX_S_W_PH
:
16508 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16510 case OPC_DPSQX_SA_W_PH
:
16512 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16514 case OPC_MULSAQ_S_W_PH
:
16516 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16518 case OPC_DPAQ_SA_L_W
:
16520 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16522 case OPC_DPSQ_SA_L_W
:
16524 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16526 case OPC_MAQ_S_W_PHL
:
16528 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16530 case OPC_MAQ_S_W_PHR
:
16532 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16534 case OPC_MAQ_SA_W_PHL
:
16536 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16538 case OPC_MAQ_SA_W_PHR
:
16540 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16542 case OPC_MULSA_W_PH
:
16544 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16548 #ifdef TARGET_MIPS64
16549 case OPC_DPAQ_W_QH_DSP
:
16551 int ac
= ret
& 0x03;
16552 tcg_gen_movi_i32(t0
, ac
);
16557 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16561 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16565 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16569 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16573 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16575 case OPC_DPAQ_S_W_QH
:
16577 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16579 case OPC_DPAQ_SA_L_PW
:
16581 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16583 case OPC_DPAU_H_OBL
:
16585 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16587 case OPC_DPAU_H_OBR
:
16589 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16593 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16595 case OPC_DPSQ_S_W_QH
:
16597 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16599 case OPC_DPSQ_SA_L_PW
:
16601 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16603 case OPC_DPSU_H_OBL
:
16605 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16607 case OPC_DPSU_H_OBR
:
16609 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16611 case OPC_MAQ_S_L_PWL
:
16613 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16615 case OPC_MAQ_S_L_PWR
:
16617 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16619 case OPC_MAQ_S_W_QHLL
:
16621 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16623 case OPC_MAQ_SA_W_QHLL
:
16625 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16627 case OPC_MAQ_S_W_QHLR
:
16629 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16631 case OPC_MAQ_SA_W_QHLR
:
16633 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16635 case OPC_MAQ_S_W_QHRL
:
16637 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16639 case OPC_MAQ_SA_W_QHRL
:
16641 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16643 case OPC_MAQ_S_W_QHRR
:
16645 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16647 case OPC_MAQ_SA_W_QHRR
:
16649 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16651 case OPC_MULSAQ_S_L_PW
:
16653 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16655 case OPC_MULSAQ_S_W_QH
:
16657 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16663 case OPC_ADDU_QB_DSP
:
16665 case OPC_MULEU_S_PH_QBL
:
16667 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16669 case OPC_MULEU_S_PH_QBR
:
16671 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16673 case OPC_MULQ_RS_PH
:
16675 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16677 case OPC_MULEQ_S_W_PHL
:
16679 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16681 case OPC_MULEQ_S_W_PHR
:
16683 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16685 case OPC_MULQ_S_PH
:
16687 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16691 #ifdef TARGET_MIPS64
16692 case OPC_ADDU_OB_DSP
:
16694 case OPC_MULEQ_S_PW_QHL
:
16696 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16698 case OPC_MULEQ_S_PW_QHR
:
16700 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16702 case OPC_MULEU_S_QH_OBL
:
16704 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16706 case OPC_MULEU_S_QH_OBR
:
16708 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16710 case OPC_MULQ_RS_QH
:
16712 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16719 tcg_temp_free_i32(t0
);
16720 tcg_temp_free(v1_t
);
16721 tcg_temp_free(v2_t
);
16724 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16732 /* Treat as NOP. */
16736 t0
= tcg_temp_new();
16737 val_t
= tcg_temp_new();
16738 gen_load_gpr(val_t
, val
);
16741 case OPC_ABSQ_S_PH_DSP
:
16745 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16750 target_long result
;
16751 imm
= (ctx
->opcode
>> 16) & 0xFF;
16752 result
= (uint32_t)imm
<< 24 |
16753 (uint32_t)imm
<< 16 |
16754 (uint32_t)imm
<< 8 |
16756 result
= (int32_t)result
;
16757 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16762 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16763 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16764 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16765 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16766 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16767 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16772 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16773 imm
= (int16_t)(imm
<< 6) >> 6;
16774 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16775 (target_long
)((int32_t)imm
<< 16 | \
16781 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16782 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16783 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16784 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16788 #ifdef TARGET_MIPS64
16789 case OPC_ABSQ_S_QH_DSP
:
16796 imm
= (ctx
->opcode
>> 16) & 0xFF;
16797 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16798 temp
= (temp
<< 16) | temp
;
16799 temp
= (temp
<< 32) | temp
;
16800 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16808 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16809 imm
= (int16_t)(imm
<< 6) >> 6;
16810 temp
= ((target_long
)imm
<< 32) \
16811 | ((target_long
)imm
& 0xFFFFFFFF);
16812 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16820 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16821 imm
= (int16_t)(imm
<< 6) >> 6;
16823 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16824 ((uint64_t)(uint16_t)imm
<< 32) |
16825 ((uint64_t)(uint16_t)imm
<< 16) |
16826 (uint64_t)(uint16_t)imm
;
16827 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16832 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16833 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16834 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16835 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16836 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16837 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16838 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16842 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16843 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16844 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16848 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16849 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16850 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16851 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16852 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16859 tcg_temp_free(val_t
);
16862 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16863 uint32_t op1
, uint32_t op2
,
16864 int ret
, int v1
, int v2
, int check_ret
)
16870 if ((ret
== 0) && (check_ret
== 1)) {
16871 /* Treat as NOP. */
16875 t1
= tcg_temp_new();
16876 v1_t
= tcg_temp_new();
16877 v2_t
= tcg_temp_new();
16879 gen_load_gpr(v1_t
, v1
);
16880 gen_load_gpr(v2_t
, v2
);
16883 case OPC_CMPU_EQ_QB_DSP
:
16885 case OPC_CMPU_EQ_QB
:
16887 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16889 case OPC_CMPU_LT_QB
:
16891 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16893 case OPC_CMPU_LE_QB
:
16895 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16897 case OPC_CMPGU_EQ_QB
:
16899 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16901 case OPC_CMPGU_LT_QB
:
16903 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16905 case OPC_CMPGU_LE_QB
:
16907 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16909 case OPC_CMPGDU_EQ_QB
:
16911 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16912 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16913 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16914 tcg_gen_shli_tl(t1
, t1
, 24);
16915 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16917 case OPC_CMPGDU_LT_QB
:
16919 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16920 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16921 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16922 tcg_gen_shli_tl(t1
, t1
, 24);
16923 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16925 case OPC_CMPGDU_LE_QB
:
16927 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16928 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16929 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16930 tcg_gen_shli_tl(t1
, t1
, 24);
16931 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16933 case OPC_CMP_EQ_PH
:
16935 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16937 case OPC_CMP_LT_PH
:
16939 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16941 case OPC_CMP_LE_PH
:
16943 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16947 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16951 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16953 case OPC_PACKRL_PH
:
16955 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16959 #ifdef TARGET_MIPS64
16960 case OPC_CMPU_EQ_OB_DSP
:
16962 case OPC_CMP_EQ_PW
:
16964 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16966 case OPC_CMP_LT_PW
:
16968 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16970 case OPC_CMP_LE_PW
:
16972 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16974 case OPC_CMP_EQ_QH
:
16976 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16978 case OPC_CMP_LT_QH
:
16980 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16982 case OPC_CMP_LE_QH
:
16984 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16986 case OPC_CMPGDU_EQ_OB
:
16988 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16990 case OPC_CMPGDU_LT_OB
:
16992 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16994 case OPC_CMPGDU_LE_OB
:
16996 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16998 case OPC_CMPGU_EQ_OB
:
17000 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
17002 case OPC_CMPGU_LT_OB
:
17004 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
17006 case OPC_CMPGU_LE_OB
:
17008 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
17010 case OPC_CMPU_EQ_OB
:
17012 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
17014 case OPC_CMPU_LT_OB
:
17016 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
17018 case OPC_CMPU_LE_OB
:
17020 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
17022 case OPC_PACKRL_PW
:
17024 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
17028 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
17032 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
17036 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
17044 tcg_temp_free(v1_t
);
17045 tcg_temp_free(v2_t
);
17048 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
17049 uint32_t op1
, int rt
, int rs
, int sa
)
17056 /* Treat as NOP. */
17060 t0
= tcg_temp_new();
17061 gen_load_gpr(t0
, rs
);
17064 case OPC_APPEND_DSP
:
17065 switch (MASK_APPEND(ctx
->opcode
)) {
17068 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
17070 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
17074 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
17075 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
17076 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
17077 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
17079 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
17083 if (sa
!= 0 && sa
!= 2) {
17084 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
17085 tcg_gen_ext32u_tl(t0
, t0
);
17086 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
17087 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
17089 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
17091 default: /* Invalid */
17092 MIPS_INVAL("MASK APPEND");
17093 generate_exception_end(ctx
, EXCP_RI
);
17097 #ifdef TARGET_MIPS64
17098 case OPC_DAPPEND_DSP
:
17099 switch (MASK_DAPPEND(ctx
->opcode
)) {
17102 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
17106 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
17107 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
17108 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
17112 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
17113 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
17114 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
17119 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
17120 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
17121 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
17122 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
17125 default: /* Invalid */
17126 MIPS_INVAL("MASK DAPPEND");
17127 generate_exception_end(ctx
, EXCP_RI
);
17136 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
17137 int ret
, int v1
, int v2
, int check_ret
)
17146 if ((ret
== 0) && (check_ret
== 1)) {
17147 /* Treat as NOP. */
17151 t0
= tcg_temp_new();
17152 t1
= tcg_temp_new();
17153 v1_t
= tcg_temp_new();
17154 v2_t
= tcg_temp_new();
17156 gen_load_gpr(v1_t
, v1
);
17157 gen_load_gpr(v2_t
, v2
);
17160 case OPC_EXTR_W_DSP
:
17164 tcg_gen_movi_tl(t0
, v2
);
17165 tcg_gen_movi_tl(t1
, v1
);
17166 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17169 tcg_gen_movi_tl(t0
, v2
);
17170 tcg_gen_movi_tl(t1
, v1
);
17171 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17173 case OPC_EXTR_RS_W
:
17174 tcg_gen_movi_tl(t0
, v2
);
17175 tcg_gen_movi_tl(t1
, v1
);
17176 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17179 tcg_gen_movi_tl(t0
, v2
);
17180 tcg_gen_movi_tl(t1
, v1
);
17181 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17183 case OPC_EXTRV_S_H
:
17184 tcg_gen_movi_tl(t0
, v2
);
17185 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17188 tcg_gen_movi_tl(t0
, v2
);
17189 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17191 case OPC_EXTRV_R_W
:
17192 tcg_gen_movi_tl(t0
, v2
);
17193 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17195 case OPC_EXTRV_RS_W
:
17196 tcg_gen_movi_tl(t0
, v2
);
17197 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17200 tcg_gen_movi_tl(t0
, v2
);
17201 tcg_gen_movi_tl(t1
, v1
);
17202 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17205 tcg_gen_movi_tl(t0
, v2
);
17206 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17209 tcg_gen_movi_tl(t0
, v2
);
17210 tcg_gen_movi_tl(t1
, v1
);
17211 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17214 tcg_gen_movi_tl(t0
, v2
);
17215 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17218 imm
= (ctx
->opcode
>> 20) & 0x3F;
17219 tcg_gen_movi_tl(t0
, ret
);
17220 tcg_gen_movi_tl(t1
, imm
);
17221 gen_helper_shilo(t0
, t1
, cpu_env
);
17224 tcg_gen_movi_tl(t0
, ret
);
17225 gen_helper_shilo(t0
, v1_t
, cpu_env
);
17228 tcg_gen_movi_tl(t0
, ret
);
17229 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
17232 imm
= (ctx
->opcode
>> 11) & 0x3FF;
17233 tcg_gen_movi_tl(t0
, imm
);
17234 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
17237 imm
= (ctx
->opcode
>> 16) & 0x03FF;
17238 tcg_gen_movi_tl(t0
, imm
);
17239 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
17243 #ifdef TARGET_MIPS64
17244 case OPC_DEXTR_W_DSP
:
17248 tcg_gen_movi_tl(t0
, ret
);
17249 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
17253 int shift
= (ctx
->opcode
>> 19) & 0x7F;
17254 int ac
= (ctx
->opcode
>> 11) & 0x03;
17255 tcg_gen_movi_tl(t0
, shift
);
17256 tcg_gen_movi_tl(t1
, ac
);
17257 gen_helper_dshilo(t0
, t1
, cpu_env
);
17262 int ac
= (ctx
->opcode
>> 11) & 0x03;
17263 tcg_gen_movi_tl(t0
, ac
);
17264 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
17268 tcg_gen_movi_tl(t0
, v2
);
17269 tcg_gen_movi_tl(t1
, v1
);
17271 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17274 tcg_gen_movi_tl(t0
, v2
);
17275 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17278 tcg_gen_movi_tl(t0
, v2
);
17279 tcg_gen_movi_tl(t1
, v1
);
17280 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17283 tcg_gen_movi_tl(t0
, v2
);
17284 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17287 tcg_gen_movi_tl(t0
, v2
);
17288 tcg_gen_movi_tl(t1
, v1
);
17289 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17291 case OPC_DEXTR_R_L
:
17292 tcg_gen_movi_tl(t0
, v2
);
17293 tcg_gen_movi_tl(t1
, v1
);
17294 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17296 case OPC_DEXTR_RS_L
:
17297 tcg_gen_movi_tl(t0
, v2
);
17298 tcg_gen_movi_tl(t1
, v1
);
17299 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17302 tcg_gen_movi_tl(t0
, v2
);
17303 tcg_gen_movi_tl(t1
, v1
);
17304 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17306 case OPC_DEXTR_R_W
:
17307 tcg_gen_movi_tl(t0
, v2
);
17308 tcg_gen_movi_tl(t1
, v1
);
17309 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17311 case OPC_DEXTR_RS_W
:
17312 tcg_gen_movi_tl(t0
, v2
);
17313 tcg_gen_movi_tl(t1
, v1
);
17314 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17316 case OPC_DEXTR_S_H
:
17317 tcg_gen_movi_tl(t0
, v2
);
17318 tcg_gen_movi_tl(t1
, v1
);
17319 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17321 case OPC_DEXTRV_S_H
:
17322 tcg_gen_movi_tl(t0
, v2
);
17323 tcg_gen_movi_tl(t1
, v1
);
17324 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17327 tcg_gen_movi_tl(t0
, v2
);
17328 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17330 case OPC_DEXTRV_R_L
:
17331 tcg_gen_movi_tl(t0
, v2
);
17332 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17334 case OPC_DEXTRV_RS_L
:
17335 tcg_gen_movi_tl(t0
, v2
);
17336 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17339 tcg_gen_movi_tl(t0
, v2
);
17340 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17342 case OPC_DEXTRV_R_W
:
17343 tcg_gen_movi_tl(t0
, v2
);
17344 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17346 case OPC_DEXTRV_RS_W
:
17347 tcg_gen_movi_tl(t0
, v2
);
17348 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17357 tcg_temp_free(v1_t
);
17358 tcg_temp_free(v2_t
);
17361 /* End MIPSDSP functions. */
17363 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17365 int rs
, rt
, rd
, sa
;
17368 rs
= (ctx
->opcode
>> 21) & 0x1f;
17369 rt
= (ctx
->opcode
>> 16) & 0x1f;
17370 rd
= (ctx
->opcode
>> 11) & 0x1f;
17371 sa
= (ctx
->opcode
>> 6) & 0x1f;
17373 op1
= MASK_SPECIAL(ctx
->opcode
);
17376 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17382 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17392 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17395 MIPS_INVAL("special_r6 muldiv");
17396 generate_exception_end(ctx
, EXCP_RI
);
17402 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17406 if (rt
== 0 && sa
== 1) {
17407 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17408 We need additionally to check other fields */
17409 gen_cl(ctx
, op1
, rd
, rs
);
17411 generate_exception_end(ctx
, EXCP_RI
);
17415 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17416 gen_helper_do_semihosting(cpu_env
);
17418 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17419 generate_exception_end(ctx
, EXCP_RI
);
17421 generate_exception_end(ctx
, EXCP_DBp
);
17425 #if defined(TARGET_MIPS64)
17427 check_mips_64(ctx
);
17428 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17432 if (rt
== 0 && sa
== 1) {
17433 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17434 We need additionally to check other fields */
17435 check_mips_64(ctx
);
17436 gen_cl(ctx
, op1
, rd
, rs
);
17438 generate_exception_end(ctx
, EXCP_RI
);
17446 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17456 check_mips_64(ctx
);
17457 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17460 MIPS_INVAL("special_r6 muldiv");
17461 generate_exception_end(ctx
, EXCP_RI
);
17466 default: /* Invalid */
17467 MIPS_INVAL("special_r6");
17468 generate_exception_end(ctx
, EXCP_RI
);
17473 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17475 int rs
, rt
, rd
, sa
;
17478 rs
= (ctx
->opcode
>> 21) & 0x1f;
17479 rt
= (ctx
->opcode
>> 16) & 0x1f;
17480 rd
= (ctx
->opcode
>> 11) & 0x1f;
17481 sa
= (ctx
->opcode
>> 6) & 0x1f;
17483 op1
= MASK_SPECIAL(ctx
->opcode
);
17485 case OPC_MOVN
: /* Conditional move */
17487 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17488 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17489 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17491 case OPC_MFHI
: /* Move from HI/LO */
17493 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17496 case OPC_MTLO
: /* Move to HI/LO */
17497 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17500 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17501 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17502 check_cp1_enabled(ctx
);
17503 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17504 (ctx
->opcode
>> 16) & 1);
17506 generate_exception_err(ctx
, EXCP_CpU
, 1);
17512 check_insn(ctx
, INSN_VR54XX
);
17513 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17514 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17516 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17521 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17523 #if defined(TARGET_MIPS64)
17528 check_insn(ctx
, ISA_MIPS3
);
17529 check_mips_64(ctx
);
17530 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17534 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17537 #ifdef MIPS_STRICT_STANDARD
17538 MIPS_INVAL("SPIM");
17539 generate_exception_end(ctx
, EXCP_RI
);
17541 /* Implemented as RI exception for now. */
17542 MIPS_INVAL("spim (unofficial)");
17543 generate_exception_end(ctx
, EXCP_RI
);
17546 default: /* Invalid */
17547 MIPS_INVAL("special_legacy");
17548 generate_exception_end(ctx
, EXCP_RI
);
17553 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17555 int rs
, rt
, rd
, sa
;
17558 rs
= (ctx
->opcode
>> 21) & 0x1f;
17559 rt
= (ctx
->opcode
>> 16) & 0x1f;
17560 rd
= (ctx
->opcode
>> 11) & 0x1f;
17561 sa
= (ctx
->opcode
>> 6) & 0x1f;
17563 op1
= MASK_SPECIAL(ctx
->opcode
);
17565 case OPC_SLL
: /* Shift with immediate */
17566 if (sa
== 5 && rd
== 0 &&
17567 rs
== 0 && rt
== 0) { /* PAUSE */
17568 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17569 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17570 generate_exception_end(ctx
, EXCP_RI
);
17576 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17579 switch ((ctx
->opcode
>> 21) & 0x1f) {
17581 /* rotr is decoded as srl on non-R2 CPUs */
17582 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17587 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17590 generate_exception_end(ctx
, EXCP_RI
);
17598 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17600 case OPC_SLLV
: /* Shifts */
17602 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17605 switch ((ctx
->opcode
>> 6) & 0x1f) {
17607 /* rotrv is decoded as srlv on non-R2 CPUs */
17608 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17613 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17616 generate_exception_end(ctx
, EXCP_RI
);
17620 case OPC_SLT
: /* Set on less than */
17622 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17624 case OPC_AND
: /* Logic*/
17628 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17631 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17633 case OPC_TGE
: /* Traps */
17639 check_insn(ctx
, ISA_MIPS2
);
17640 gen_trap(ctx
, op1
, rs
, rt
, -1);
17642 case OPC_LSA
: /* OPC_PMON */
17643 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17644 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17645 decode_opc_special_r6(env
, ctx
);
17647 /* Pmon entry point, also R4010 selsl */
17648 #ifdef MIPS_STRICT_STANDARD
17649 MIPS_INVAL("PMON / selsl");
17650 generate_exception_end(ctx
, EXCP_RI
);
17652 gen_helper_0e0i(pmon
, sa
);
17657 generate_exception_end(ctx
, EXCP_SYSCALL
);
17660 generate_exception_end(ctx
, EXCP_BREAK
);
17663 check_insn(ctx
, ISA_MIPS2
);
17664 gen_sync(extract32(ctx
->opcode
, 6, 5));
17667 #if defined(TARGET_MIPS64)
17668 /* MIPS64 specific opcodes */
17673 check_insn(ctx
, ISA_MIPS3
);
17674 check_mips_64(ctx
);
17675 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17678 switch ((ctx
->opcode
>> 21) & 0x1f) {
17680 /* drotr is decoded as dsrl on non-R2 CPUs */
17681 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17686 check_insn(ctx
, ISA_MIPS3
);
17687 check_mips_64(ctx
);
17688 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17691 generate_exception_end(ctx
, EXCP_RI
);
17696 switch ((ctx
->opcode
>> 21) & 0x1f) {
17698 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17699 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17704 check_insn(ctx
, ISA_MIPS3
);
17705 check_mips_64(ctx
);
17706 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17709 generate_exception_end(ctx
, EXCP_RI
);
17717 check_insn(ctx
, ISA_MIPS3
);
17718 check_mips_64(ctx
);
17719 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17723 check_insn(ctx
, ISA_MIPS3
);
17724 check_mips_64(ctx
);
17725 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17728 switch ((ctx
->opcode
>> 6) & 0x1f) {
17730 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17731 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17736 check_insn(ctx
, ISA_MIPS3
);
17737 check_mips_64(ctx
);
17738 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17741 generate_exception_end(ctx
, EXCP_RI
);
17746 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17747 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17748 decode_opc_special_r6(env
, ctx
);
17753 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17754 decode_opc_special_r6(env
, ctx
);
17756 decode_opc_special_legacy(env
, ctx
);
17761 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17766 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17768 rs
= (ctx
->opcode
>> 21) & 0x1f;
17769 rt
= (ctx
->opcode
>> 16) & 0x1f;
17770 rd
= (ctx
->opcode
>> 11) & 0x1f;
17772 op1
= MASK_SPECIAL2(ctx
->opcode
);
17774 case OPC_MADD
: /* Multiply and add/sub */
17778 check_insn(ctx
, ISA_MIPS32
);
17779 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17782 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17785 case OPC_DIVU_G_2F
:
17786 case OPC_MULT_G_2F
:
17787 case OPC_MULTU_G_2F
:
17789 case OPC_MODU_G_2F
:
17790 check_insn(ctx
, INSN_LOONGSON2F
);
17791 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17795 check_insn(ctx
, ISA_MIPS32
);
17796 gen_cl(ctx
, op1
, rd
, rs
);
17799 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17800 gen_helper_do_semihosting(cpu_env
);
17802 /* XXX: not clear which exception should be raised
17803 * when in debug mode...
17805 check_insn(ctx
, ISA_MIPS32
);
17806 generate_exception_end(ctx
, EXCP_DBp
);
17809 #if defined(TARGET_MIPS64)
17812 check_insn(ctx
, ISA_MIPS64
);
17813 check_mips_64(ctx
);
17814 gen_cl(ctx
, op1
, rd
, rs
);
17816 case OPC_DMULT_G_2F
:
17817 case OPC_DMULTU_G_2F
:
17818 case OPC_DDIV_G_2F
:
17819 case OPC_DDIVU_G_2F
:
17820 case OPC_DMOD_G_2F
:
17821 case OPC_DMODU_G_2F
:
17822 check_insn(ctx
, INSN_LOONGSON2F
);
17823 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17826 default: /* Invalid */
17827 MIPS_INVAL("special2_legacy");
17828 generate_exception_end(ctx
, EXCP_RI
);
17833 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17835 int rs
, rt
, rd
, sa
;
17839 rs
= (ctx
->opcode
>> 21) & 0x1f;
17840 rt
= (ctx
->opcode
>> 16) & 0x1f;
17841 rd
= (ctx
->opcode
>> 11) & 0x1f;
17842 sa
= (ctx
->opcode
>> 6) & 0x1f;
17843 imm
= (int16_t)ctx
->opcode
>> 7;
17845 op1
= MASK_SPECIAL3(ctx
->opcode
);
17849 /* hint codes 24-31 are reserved and signal RI */
17850 generate_exception_end(ctx
, EXCP_RI
);
17852 /* Treat as NOP. */
17855 check_cp0_enabled(ctx
);
17856 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17857 gen_cache_operation(ctx
, rt
, rs
, imm
);
17861 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17864 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17869 /* Treat as NOP. */
17872 op2
= MASK_BSHFL(ctx
->opcode
);
17875 case OPC_ALIGN_END
:
17876 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17879 gen_bitswap(ctx
, op2
, rd
, rt
);
17884 #if defined(TARGET_MIPS64)
17886 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17889 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17892 check_mips_64(ctx
);
17895 /* Treat as NOP. */
17898 op2
= MASK_DBSHFL(ctx
->opcode
);
17901 case OPC_DALIGN_END
:
17902 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17905 gen_bitswap(ctx
, op2
, rd
, rt
);
17912 default: /* Invalid */
17913 MIPS_INVAL("special3_r6");
17914 generate_exception_end(ctx
, EXCP_RI
);
17919 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17924 rs
= (ctx
->opcode
>> 21) & 0x1f;
17925 rt
= (ctx
->opcode
>> 16) & 0x1f;
17926 rd
= (ctx
->opcode
>> 11) & 0x1f;
17928 op1
= MASK_SPECIAL3(ctx
->opcode
);
17931 case OPC_DIVU_G_2E
:
17933 case OPC_MODU_G_2E
:
17934 case OPC_MULT_G_2E
:
17935 case OPC_MULTU_G_2E
:
17936 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17937 * the same mask and op1. */
17938 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17939 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17942 case OPC_ADDUH_R_QB
:
17944 case OPC_ADDQH_R_PH
:
17946 case OPC_ADDQH_R_W
:
17948 case OPC_SUBUH_R_QB
:
17950 case OPC_SUBQH_R_PH
:
17952 case OPC_SUBQH_R_W
:
17953 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17958 case OPC_MULQ_RS_W
:
17959 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17962 MIPS_INVAL("MASK ADDUH.QB");
17963 generate_exception_end(ctx
, EXCP_RI
);
17966 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17967 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17969 generate_exception_end(ctx
, EXCP_RI
);
17973 op2
= MASK_LX(ctx
->opcode
);
17975 #if defined(TARGET_MIPS64)
17981 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17983 default: /* Invalid */
17984 MIPS_INVAL("MASK LX");
17985 generate_exception_end(ctx
, EXCP_RI
);
17989 case OPC_ABSQ_S_PH_DSP
:
17990 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17992 case OPC_ABSQ_S_QB
:
17993 case OPC_ABSQ_S_PH
:
17995 case OPC_PRECEQ_W_PHL
:
17996 case OPC_PRECEQ_W_PHR
:
17997 case OPC_PRECEQU_PH_QBL
:
17998 case OPC_PRECEQU_PH_QBR
:
17999 case OPC_PRECEQU_PH_QBLA
:
18000 case OPC_PRECEQU_PH_QBRA
:
18001 case OPC_PRECEU_PH_QBL
:
18002 case OPC_PRECEU_PH_QBR
:
18003 case OPC_PRECEU_PH_QBLA
:
18004 case OPC_PRECEU_PH_QBRA
:
18005 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18012 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18015 MIPS_INVAL("MASK ABSQ_S.PH");
18016 generate_exception_end(ctx
, EXCP_RI
);
18020 case OPC_ADDU_QB_DSP
:
18021 op2
= MASK_ADDU_QB(ctx
->opcode
);
18024 case OPC_ADDQ_S_PH
:
18027 case OPC_ADDU_S_QB
:
18029 case OPC_ADDU_S_PH
:
18031 case OPC_SUBQ_S_PH
:
18034 case OPC_SUBU_S_QB
:
18036 case OPC_SUBU_S_PH
:
18040 case OPC_RADDU_W_QB
:
18041 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18043 case OPC_MULEU_S_PH_QBL
:
18044 case OPC_MULEU_S_PH_QBR
:
18045 case OPC_MULQ_RS_PH
:
18046 case OPC_MULEQ_S_W_PHL
:
18047 case OPC_MULEQ_S_W_PHR
:
18048 case OPC_MULQ_S_PH
:
18049 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18051 default: /* Invalid */
18052 MIPS_INVAL("MASK ADDU.QB");
18053 generate_exception_end(ctx
, EXCP_RI
);
18058 case OPC_CMPU_EQ_QB_DSP
:
18059 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
18061 case OPC_PRECR_SRA_PH_W
:
18062 case OPC_PRECR_SRA_R_PH_W
:
18063 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18065 case OPC_PRECR_QB_PH
:
18066 case OPC_PRECRQ_QB_PH
:
18067 case OPC_PRECRQ_PH_W
:
18068 case OPC_PRECRQ_RS_PH_W
:
18069 case OPC_PRECRQU_S_QB_PH
:
18070 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18072 case OPC_CMPU_EQ_QB
:
18073 case OPC_CMPU_LT_QB
:
18074 case OPC_CMPU_LE_QB
:
18075 case OPC_CMP_EQ_PH
:
18076 case OPC_CMP_LT_PH
:
18077 case OPC_CMP_LE_PH
:
18078 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18080 case OPC_CMPGU_EQ_QB
:
18081 case OPC_CMPGU_LT_QB
:
18082 case OPC_CMPGU_LE_QB
:
18083 case OPC_CMPGDU_EQ_QB
:
18084 case OPC_CMPGDU_LT_QB
:
18085 case OPC_CMPGDU_LE_QB
:
18088 case OPC_PACKRL_PH
:
18089 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18091 default: /* Invalid */
18092 MIPS_INVAL("MASK CMPU.EQ.QB");
18093 generate_exception_end(ctx
, EXCP_RI
);
18097 case OPC_SHLL_QB_DSP
:
18098 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18100 case OPC_DPA_W_PH_DSP
:
18101 op2
= MASK_DPA_W_PH(ctx
->opcode
);
18103 case OPC_DPAU_H_QBL
:
18104 case OPC_DPAU_H_QBR
:
18105 case OPC_DPSU_H_QBL
:
18106 case OPC_DPSU_H_QBR
:
18108 case OPC_DPAX_W_PH
:
18109 case OPC_DPAQ_S_W_PH
:
18110 case OPC_DPAQX_S_W_PH
:
18111 case OPC_DPAQX_SA_W_PH
:
18113 case OPC_DPSX_W_PH
:
18114 case OPC_DPSQ_S_W_PH
:
18115 case OPC_DPSQX_S_W_PH
:
18116 case OPC_DPSQX_SA_W_PH
:
18117 case OPC_MULSAQ_S_W_PH
:
18118 case OPC_DPAQ_SA_L_W
:
18119 case OPC_DPSQ_SA_L_W
:
18120 case OPC_MAQ_S_W_PHL
:
18121 case OPC_MAQ_S_W_PHR
:
18122 case OPC_MAQ_SA_W_PHL
:
18123 case OPC_MAQ_SA_W_PHR
:
18124 case OPC_MULSA_W_PH
:
18125 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18127 default: /* Invalid */
18128 MIPS_INVAL("MASK DPAW.PH");
18129 generate_exception_end(ctx
, EXCP_RI
);
18134 op2
= MASK_INSV(ctx
->opcode
);
18145 t0
= tcg_temp_new();
18146 t1
= tcg_temp_new();
18148 gen_load_gpr(t0
, rt
);
18149 gen_load_gpr(t1
, rs
);
18151 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18157 default: /* Invalid */
18158 MIPS_INVAL("MASK INSV");
18159 generate_exception_end(ctx
, EXCP_RI
);
18163 case OPC_APPEND_DSP
:
18164 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18166 case OPC_EXTR_W_DSP
:
18167 op2
= MASK_EXTR_W(ctx
->opcode
);
18171 case OPC_EXTR_RS_W
:
18173 case OPC_EXTRV_S_H
:
18175 case OPC_EXTRV_R_W
:
18176 case OPC_EXTRV_RS_W
:
18181 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18184 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18190 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18192 default: /* Invalid */
18193 MIPS_INVAL("MASK EXTR.W");
18194 generate_exception_end(ctx
, EXCP_RI
);
18198 #if defined(TARGET_MIPS64)
18199 case OPC_DDIV_G_2E
:
18200 case OPC_DDIVU_G_2E
:
18201 case OPC_DMULT_G_2E
:
18202 case OPC_DMULTU_G_2E
:
18203 case OPC_DMOD_G_2E
:
18204 case OPC_DMODU_G_2E
:
18205 check_insn(ctx
, INSN_LOONGSON2E
);
18206 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
18208 case OPC_ABSQ_S_QH_DSP
:
18209 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
18211 case OPC_PRECEQ_L_PWL
:
18212 case OPC_PRECEQ_L_PWR
:
18213 case OPC_PRECEQ_PW_QHL
:
18214 case OPC_PRECEQ_PW_QHR
:
18215 case OPC_PRECEQ_PW_QHLA
:
18216 case OPC_PRECEQ_PW_QHRA
:
18217 case OPC_PRECEQU_QH_OBL
:
18218 case OPC_PRECEQU_QH_OBR
:
18219 case OPC_PRECEQU_QH_OBLA
:
18220 case OPC_PRECEQU_QH_OBRA
:
18221 case OPC_PRECEU_QH_OBL
:
18222 case OPC_PRECEU_QH_OBR
:
18223 case OPC_PRECEU_QH_OBLA
:
18224 case OPC_PRECEU_QH_OBRA
:
18225 case OPC_ABSQ_S_OB
:
18226 case OPC_ABSQ_S_PW
:
18227 case OPC_ABSQ_S_QH
:
18228 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18236 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18238 default: /* Invalid */
18239 MIPS_INVAL("MASK ABSQ_S.QH");
18240 generate_exception_end(ctx
, EXCP_RI
);
18244 case OPC_ADDU_OB_DSP
:
18245 op2
= MASK_ADDU_OB(ctx
->opcode
);
18247 case OPC_RADDU_L_OB
:
18249 case OPC_SUBQ_S_PW
:
18251 case OPC_SUBQ_S_QH
:
18253 case OPC_SUBU_S_OB
:
18255 case OPC_SUBU_S_QH
:
18257 case OPC_SUBUH_R_OB
:
18259 case OPC_ADDQ_S_PW
:
18261 case OPC_ADDQ_S_QH
:
18263 case OPC_ADDU_S_OB
:
18265 case OPC_ADDU_S_QH
:
18267 case OPC_ADDUH_R_OB
:
18268 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18270 case OPC_MULEQ_S_PW_QHL
:
18271 case OPC_MULEQ_S_PW_QHR
:
18272 case OPC_MULEU_S_QH_OBL
:
18273 case OPC_MULEU_S_QH_OBR
:
18274 case OPC_MULQ_RS_QH
:
18275 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18277 default: /* Invalid */
18278 MIPS_INVAL("MASK ADDU.OB");
18279 generate_exception_end(ctx
, EXCP_RI
);
18283 case OPC_CMPU_EQ_OB_DSP
:
18284 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
18286 case OPC_PRECR_SRA_QH_PW
:
18287 case OPC_PRECR_SRA_R_QH_PW
:
18288 /* Return value is rt. */
18289 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18291 case OPC_PRECR_OB_QH
:
18292 case OPC_PRECRQ_OB_QH
:
18293 case OPC_PRECRQ_PW_L
:
18294 case OPC_PRECRQ_QH_PW
:
18295 case OPC_PRECRQ_RS_QH_PW
:
18296 case OPC_PRECRQU_S_OB_QH
:
18297 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18299 case OPC_CMPU_EQ_OB
:
18300 case OPC_CMPU_LT_OB
:
18301 case OPC_CMPU_LE_OB
:
18302 case OPC_CMP_EQ_QH
:
18303 case OPC_CMP_LT_QH
:
18304 case OPC_CMP_LE_QH
:
18305 case OPC_CMP_EQ_PW
:
18306 case OPC_CMP_LT_PW
:
18307 case OPC_CMP_LE_PW
:
18308 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18310 case OPC_CMPGDU_EQ_OB
:
18311 case OPC_CMPGDU_LT_OB
:
18312 case OPC_CMPGDU_LE_OB
:
18313 case OPC_CMPGU_EQ_OB
:
18314 case OPC_CMPGU_LT_OB
:
18315 case OPC_CMPGU_LE_OB
:
18316 case OPC_PACKRL_PW
:
18320 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18322 default: /* Invalid */
18323 MIPS_INVAL("MASK CMPU_EQ.OB");
18324 generate_exception_end(ctx
, EXCP_RI
);
18328 case OPC_DAPPEND_DSP
:
18329 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18331 case OPC_DEXTR_W_DSP
:
18332 op2
= MASK_DEXTR_W(ctx
->opcode
);
18339 case OPC_DEXTR_R_L
:
18340 case OPC_DEXTR_RS_L
:
18342 case OPC_DEXTR_R_W
:
18343 case OPC_DEXTR_RS_W
:
18344 case OPC_DEXTR_S_H
:
18346 case OPC_DEXTRV_R_L
:
18347 case OPC_DEXTRV_RS_L
:
18348 case OPC_DEXTRV_S_H
:
18350 case OPC_DEXTRV_R_W
:
18351 case OPC_DEXTRV_RS_W
:
18352 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18357 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18359 default: /* Invalid */
18360 MIPS_INVAL("MASK EXTR.W");
18361 generate_exception_end(ctx
, EXCP_RI
);
18365 case OPC_DPAQ_W_QH_DSP
:
18366 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
18368 case OPC_DPAU_H_OBL
:
18369 case OPC_DPAU_H_OBR
:
18370 case OPC_DPSU_H_OBL
:
18371 case OPC_DPSU_H_OBR
:
18373 case OPC_DPAQ_S_W_QH
:
18375 case OPC_DPSQ_S_W_QH
:
18376 case OPC_MULSAQ_S_W_QH
:
18377 case OPC_DPAQ_SA_L_PW
:
18378 case OPC_DPSQ_SA_L_PW
:
18379 case OPC_MULSAQ_S_L_PW
:
18380 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18382 case OPC_MAQ_S_W_QHLL
:
18383 case OPC_MAQ_S_W_QHLR
:
18384 case OPC_MAQ_S_W_QHRL
:
18385 case OPC_MAQ_S_W_QHRR
:
18386 case OPC_MAQ_SA_W_QHLL
:
18387 case OPC_MAQ_SA_W_QHLR
:
18388 case OPC_MAQ_SA_W_QHRL
:
18389 case OPC_MAQ_SA_W_QHRR
:
18390 case OPC_MAQ_S_L_PWL
:
18391 case OPC_MAQ_S_L_PWR
:
18396 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18398 default: /* Invalid */
18399 MIPS_INVAL("MASK DPAQ.W.QH");
18400 generate_exception_end(ctx
, EXCP_RI
);
18404 case OPC_DINSV_DSP
:
18405 op2
= MASK_INSV(ctx
->opcode
);
18416 t0
= tcg_temp_new();
18417 t1
= tcg_temp_new();
18419 gen_load_gpr(t0
, rt
);
18420 gen_load_gpr(t1
, rs
);
18422 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18428 default: /* Invalid */
18429 MIPS_INVAL("MASK DINSV");
18430 generate_exception_end(ctx
, EXCP_RI
);
18434 case OPC_SHLL_OB_DSP
:
18435 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18438 default: /* Invalid */
18439 MIPS_INVAL("special3_legacy");
18440 generate_exception_end(ctx
, EXCP_RI
);
18445 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18447 int rs
, rt
, rd
, sa
;
18451 rs
= (ctx
->opcode
>> 21) & 0x1f;
18452 rt
= (ctx
->opcode
>> 16) & 0x1f;
18453 rd
= (ctx
->opcode
>> 11) & 0x1f;
18454 sa
= (ctx
->opcode
>> 6) & 0x1f;
18455 imm
= sextract32(ctx
->opcode
, 7, 9);
18457 op1
= MASK_SPECIAL3(ctx
->opcode
);
18460 * EVA loads and stores overlap Loongson 2E instructions decoded by
18461 * decode_opc_special3_legacy(), so be careful to allow their decoding when
18468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18476 check_cp0_enabled(ctx
);
18477 gen_ld(ctx
, op1
, rt
, rs
, imm
);
18481 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18486 check_cp0_enabled(ctx
);
18487 gen_st(ctx
, op1
, rt
, rs
, imm
);
18490 check_cp0_enabled(ctx
);
18491 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
18494 check_cp0_enabled(ctx
);
18495 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
18496 gen_cache_operation(ctx
, rt
, rs
, imm
);
18498 /* Treat as NOP. */
18501 check_cp0_enabled(ctx
);
18502 /* Treat as NOP. */
18510 check_insn(ctx
, ISA_MIPS32R2
);
18511 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18514 op2
= MASK_BSHFL(ctx
->opcode
);
18517 case OPC_ALIGN_END
:
18519 check_insn(ctx
, ISA_MIPS32R6
);
18520 decode_opc_special3_r6(env
, ctx
);
18523 check_insn(ctx
, ISA_MIPS32R2
);
18524 gen_bshfl(ctx
, op2
, rt
, rd
);
18528 #if defined(TARGET_MIPS64)
18535 check_insn(ctx
, ISA_MIPS64R2
);
18536 check_mips_64(ctx
);
18537 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18540 op2
= MASK_DBSHFL(ctx
->opcode
);
18543 case OPC_DALIGN_END
:
18545 check_insn(ctx
, ISA_MIPS32R6
);
18546 decode_opc_special3_r6(env
, ctx
);
18549 check_insn(ctx
, ISA_MIPS64R2
);
18550 check_mips_64(ctx
);
18551 op2
= MASK_DBSHFL(ctx
->opcode
);
18552 gen_bshfl(ctx
, op2
, rt
, rd
);
18558 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18561 check_insn(ctx
, ASE_MT
);
18563 TCGv t0
= tcg_temp_new();
18564 TCGv t1
= tcg_temp_new();
18566 gen_load_gpr(t0
, rt
);
18567 gen_load_gpr(t1
, rs
);
18568 gen_helper_fork(t0
, t1
);
18574 check_insn(ctx
, ASE_MT
);
18576 TCGv t0
= tcg_temp_new();
18578 gen_load_gpr(t0
, rs
);
18579 gen_helper_yield(t0
, cpu_env
, t0
);
18580 gen_store_gpr(t0
, rd
);
18585 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18586 decode_opc_special3_r6(env
, ctx
);
18588 decode_opc_special3_legacy(env
, ctx
);
18593 /* MIPS SIMD Architecture (MSA) */
18594 static inline int check_msa_access(DisasContext
*ctx
)
18596 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18597 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18598 generate_exception_end(ctx
, EXCP_RI
);
18602 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18603 if (ctx
->insn_flags
& ASE_MSA
) {
18604 generate_exception_end(ctx
, EXCP_MSADIS
);
18607 generate_exception_end(ctx
, EXCP_RI
);
18614 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18616 /* generates tcg ops to check if any element is 0 */
18617 /* Note this function only works with MSA_WRLEN = 128 */
18618 uint64_t eval_zero_or_big
= 0;
18619 uint64_t eval_big
= 0;
18620 TCGv_i64 t0
= tcg_temp_new_i64();
18621 TCGv_i64 t1
= tcg_temp_new_i64();
18624 eval_zero_or_big
= 0x0101010101010101ULL
;
18625 eval_big
= 0x8080808080808080ULL
;
18628 eval_zero_or_big
= 0x0001000100010001ULL
;
18629 eval_big
= 0x8000800080008000ULL
;
18632 eval_zero_or_big
= 0x0000000100000001ULL
;
18633 eval_big
= 0x8000000080000000ULL
;
18636 eval_zero_or_big
= 0x0000000000000001ULL
;
18637 eval_big
= 0x8000000000000000ULL
;
18640 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18641 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18642 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18643 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18644 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18645 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18646 tcg_gen_or_i64(t0
, t0
, t1
);
18647 /* if all bits are zero then all elements are not zero */
18648 /* if some bit is non-zero then some element is zero */
18649 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18650 tcg_gen_trunc_i64_tl(tresult
, t0
);
18651 tcg_temp_free_i64(t0
);
18652 tcg_temp_free_i64(t1
);
18655 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18657 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18658 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18659 int64_t s16
= (int16_t)ctx
->opcode
;
18661 check_msa_access(ctx
);
18663 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18664 generate_exception_end(ctx
, EXCP_RI
);
18671 TCGv_i64 t0
= tcg_temp_new_i64();
18672 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18673 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18674 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18675 tcg_gen_trunc_i64_tl(bcond
, t0
);
18676 tcg_temp_free_i64(t0
);
18683 gen_check_zero_element(bcond
, df
, wt
);
18689 gen_check_zero_element(bcond
, df
, wt
);
18690 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18694 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
18696 ctx
->hflags
|= MIPS_HFLAG_BC
;
18697 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18700 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18702 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18703 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18704 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18705 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18707 TCGv_i32 twd
= tcg_const_i32(wd
);
18708 TCGv_i32 tws
= tcg_const_i32(ws
);
18709 TCGv_i32 ti8
= tcg_const_i32(i8
);
18711 switch (MASK_MSA_I8(ctx
->opcode
)) {
18713 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18716 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18719 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18722 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18725 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18728 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18731 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18737 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18738 if (df
== DF_DOUBLE
) {
18739 generate_exception_end(ctx
, EXCP_RI
);
18741 TCGv_i32 tdf
= tcg_const_i32(df
);
18742 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18743 tcg_temp_free_i32(tdf
);
18748 MIPS_INVAL("MSA instruction");
18749 generate_exception_end(ctx
, EXCP_RI
);
18753 tcg_temp_free_i32(twd
);
18754 tcg_temp_free_i32(tws
);
18755 tcg_temp_free_i32(ti8
);
18758 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18760 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18761 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18762 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18763 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18764 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18765 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18767 TCGv_i32 tdf
= tcg_const_i32(df
);
18768 TCGv_i32 twd
= tcg_const_i32(wd
);
18769 TCGv_i32 tws
= tcg_const_i32(ws
);
18770 TCGv_i32 timm
= tcg_temp_new_i32();
18771 tcg_gen_movi_i32(timm
, u5
);
18773 switch (MASK_MSA_I5(ctx
->opcode
)) {
18775 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18778 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18780 case OPC_MAXI_S_df
:
18781 tcg_gen_movi_i32(timm
, s5
);
18782 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18784 case OPC_MAXI_U_df
:
18785 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18787 case OPC_MINI_S_df
:
18788 tcg_gen_movi_i32(timm
, s5
);
18789 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18791 case OPC_MINI_U_df
:
18792 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18795 tcg_gen_movi_i32(timm
, s5
);
18796 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18798 case OPC_CLTI_S_df
:
18799 tcg_gen_movi_i32(timm
, s5
);
18800 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18802 case OPC_CLTI_U_df
:
18803 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18805 case OPC_CLEI_S_df
:
18806 tcg_gen_movi_i32(timm
, s5
);
18807 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18809 case OPC_CLEI_U_df
:
18810 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18814 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18815 tcg_gen_movi_i32(timm
, s10
);
18816 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18820 MIPS_INVAL("MSA instruction");
18821 generate_exception_end(ctx
, EXCP_RI
);
18825 tcg_temp_free_i32(tdf
);
18826 tcg_temp_free_i32(twd
);
18827 tcg_temp_free_i32(tws
);
18828 tcg_temp_free_i32(timm
);
18831 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18833 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18834 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18835 uint32_t df
= 0, m
= 0;
18836 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18837 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18844 if ((dfm
& 0x40) == 0x00) {
18847 } else if ((dfm
& 0x60) == 0x40) {
18850 } else if ((dfm
& 0x70) == 0x60) {
18853 } else if ((dfm
& 0x78) == 0x70) {
18857 generate_exception_end(ctx
, EXCP_RI
);
18861 tdf
= tcg_const_i32(df
);
18862 tm
= tcg_const_i32(m
);
18863 twd
= tcg_const_i32(wd
);
18864 tws
= tcg_const_i32(ws
);
18866 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18868 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18871 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18874 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18877 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18880 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18883 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18885 case OPC_BINSLI_df
:
18886 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18888 case OPC_BINSRI_df
:
18889 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18892 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18895 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18898 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18901 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18904 MIPS_INVAL("MSA instruction");
18905 generate_exception_end(ctx
, EXCP_RI
);
18909 tcg_temp_free_i32(tdf
);
18910 tcg_temp_free_i32(tm
);
18911 tcg_temp_free_i32(twd
);
18912 tcg_temp_free_i32(tws
);
18915 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18917 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18918 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18919 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18920 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18921 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18923 TCGv_i32 tdf
= tcg_const_i32(df
);
18924 TCGv_i32 twd
= tcg_const_i32(wd
);
18925 TCGv_i32 tws
= tcg_const_i32(ws
);
18926 TCGv_i32 twt
= tcg_const_i32(wt
);
18928 switch (MASK_MSA_3R(ctx
->opcode
)) {
18930 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18933 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18936 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18939 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18941 case OPC_SUBS_S_df
:
18942 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18945 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18948 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18951 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18954 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18957 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18959 case OPC_ADDS_A_df
:
18960 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18962 case OPC_SUBS_U_df
:
18963 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18966 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18969 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18972 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18975 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18978 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18981 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18983 case OPC_ADDS_S_df
:
18984 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18986 case OPC_SUBSUS_U_df
:
18987 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18990 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18993 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18996 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18999 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
19002 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19005 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19007 case OPC_ADDS_U_df
:
19008 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19010 case OPC_SUBSUU_S_df
:
19011 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19014 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
19017 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
19020 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19023 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19026 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19028 case OPC_ASUB_S_df
:
19029 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19032 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19035 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
19038 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
19041 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19044 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19047 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19049 case OPC_ASUB_U_df
:
19050 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19053 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19056 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
19059 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
19062 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19064 case OPC_AVER_S_df
:
19065 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19068 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19071 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
19074 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
19077 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19079 case OPC_AVER_U_df
:
19080 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19083 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19086 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
19089 case OPC_DOTP_S_df
:
19090 case OPC_DOTP_U_df
:
19091 case OPC_DPADD_S_df
:
19092 case OPC_DPADD_U_df
:
19093 case OPC_DPSUB_S_df
:
19094 case OPC_HADD_S_df
:
19095 case OPC_DPSUB_U_df
:
19096 case OPC_HADD_U_df
:
19097 case OPC_HSUB_S_df
:
19098 case OPC_HSUB_U_df
:
19099 if (df
== DF_BYTE
) {
19100 generate_exception_end(ctx
, EXCP_RI
);
19103 switch (MASK_MSA_3R(ctx
->opcode
)) {
19104 case OPC_DOTP_S_df
:
19105 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19107 case OPC_DOTP_U_df
:
19108 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19110 case OPC_DPADD_S_df
:
19111 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19113 case OPC_DPADD_U_df
:
19114 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19116 case OPC_DPSUB_S_df
:
19117 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19119 case OPC_HADD_S_df
:
19120 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19122 case OPC_DPSUB_U_df
:
19123 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19125 case OPC_HADD_U_df
:
19126 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19128 case OPC_HSUB_S_df
:
19129 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
19131 case OPC_HSUB_U_df
:
19132 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
19137 MIPS_INVAL("MSA instruction");
19138 generate_exception_end(ctx
, EXCP_RI
);
19141 tcg_temp_free_i32(twd
);
19142 tcg_temp_free_i32(tws
);
19143 tcg_temp_free_i32(twt
);
19144 tcg_temp_free_i32(tdf
);
19147 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
19149 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
19150 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
19151 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
19152 TCGv telm
= tcg_temp_new();
19153 TCGv_i32 tsr
= tcg_const_i32(source
);
19154 TCGv_i32 tdt
= tcg_const_i32(dest
);
19156 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
19158 gen_load_gpr(telm
, source
);
19159 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
19162 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
19163 gen_store_gpr(telm
, dest
);
19166 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
19169 MIPS_INVAL("MSA instruction");
19170 generate_exception_end(ctx
, EXCP_RI
);
19174 tcg_temp_free(telm
);
19175 tcg_temp_free_i32(tdt
);
19176 tcg_temp_free_i32(tsr
);
19179 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
19182 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19183 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19184 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19186 TCGv_i32 tws
= tcg_const_i32(ws
);
19187 TCGv_i32 twd
= tcg_const_i32(wd
);
19188 TCGv_i32 tn
= tcg_const_i32(n
);
19189 TCGv_i32 tdf
= tcg_const_i32(df
);
19191 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19193 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
19195 case OPC_SPLATI_df
:
19196 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
19199 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
19201 case OPC_COPY_S_df
:
19202 case OPC_COPY_U_df
:
19203 case OPC_INSERT_df
:
19204 #if !defined(TARGET_MIPS64)
19205 /* Double format valid only for MIPS64 */
19206 if (df
== DF_DOUBLE
) {
19207 generate_exception_end(ctx
, EXCP_RI
);
19211 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19212 case OPC_COPY_S_df
:
19213 if (likely(wd
!= 0)) {
19214 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
19217 case OPC_COPY_U_df
:
19218 if (likely(wd
!= 0)) {
19219 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
19222 case OPC_INSERT_df
:
19223 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
19228 MIPS_INVAL("MSA instruction");
19229 generate_exception_end(ctx
, EXCP_RI
);
19231 tcg_temp_free_i32(twd
);
19232 tcg_temp_free_i32(tws
);
19233 tcg_temp_free_i32(tn
);
19234 tcg_temp_free_i32(tdf
);
19237 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
19239 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
19240 uint32_t df
= 0, n
= 0;
19242 if ((dfn
& 0x30) == 0x00) {
19245 } else if ((dfn
& 0x38) == 0x20) {
19248 } else if ((dfn
& 0x3c) == 0x30) {
19251 } else if ((dfn
& 0x3e) == 0x38) {
19254 } else if (dfn
== 0x3E) {
19255 /* CTCMSA, CFCMSA, MOVE.V */
19256 gen_msa_elm_3e(env
, ctx
);
19259 generate_exception_end(ctx
, EXCP_RI
);
19263 gen_msa_elm_df(env
, ctx
, df
, n
);
19266 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19268 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19269 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
19270 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19271 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19272 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19274 TCGv_i32 twd
= tcg_const_i32(wd
);
19275 TCGv_i32 tws
= tcg_const_i32(ws
);
19276 TCGv_i32 twt
= tcg_const_i32(wt
);
19277 TCGv_i32 tdf
= tcg_temp_new_i32();
19279 /* adjust df value for floating-point instruction */
19280 tcg_gen_movi_i32(tdf
, df
+ 2);
19282 switch (MASK_MSA_3RF(ctx
->opcode
)) {
19284 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19287 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19290 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19293 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19296 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19299 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19302 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
19305 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19308 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19311 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
19314 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19317 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19320 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19323 tcg_gen_movi_i32(tdf
, df
+ 1);
19324 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19327 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19330 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19332 case OPC_MADD_Q_df
:
19333 tcg_gen_movi_i32(tdf
, df
+ 1);
19334 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19337 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19339 case OPC_MSUB_Q_df
:
19340 tcg_gen_movi_i32(tdf
, df
+ 1);
19341 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19344 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19347 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
19350 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19353 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
19356 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19359 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19362 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19365 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19368 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19371 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19374 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19377 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19380 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
19382 case OPC_MULR_Q_df
:
19383 tcg_gen_movi_i32(tdf
, df
+ 1);
19384 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19387 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19389 case OPC_FMIN_A_df
:
19390 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19392 case OPC_MADDR_Q_df
:
19393 tcg_gen_movi_i32(tdf
, df
+ 1);
19394 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19397 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19400 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
19402 case OPC_MSUBR_Q_df
:
19403 tcg_gen_movi_i32(tdf
, df
+ 1);
19404 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19407 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19409 case OPC_FMAX_A_df
:
19410 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19413 MIPS_INVAL("MSA instruction");
19414 generate_exception_end(ctx
, EXCP_RI
);
19418 tcg_temp_free_i32(twd
);
19419 tcg_temp_free_i32(tws
);
19420 tcg_temp_free_i32(twt
);
19421 tcg_temp_free_i32(tdf
);
19424 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
19426 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19427 (op & (0x7 << 18)))
19428 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19429 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19430 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19431 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
19432 TCGv_i32 twd
= tcg_const_i32(wd
);
19433 TCGv_i32 tws
= tcg_const_i32(ws
);
19434 TCGv_i32 twt
= tcg_const_i32(wt
);
19435 TCGv_i32 tdf
= tcg_const_i32(df
);
19437 switch (MASK_MSA_2R(ctx
->opcode
)) {
19439 #if !defined(TARGET_MIPS64)
19440 /* Double format valid only for MIPS64 */
19441 if (df
== DF_DOUBLE
) {
19442 generate_exception_end(ctx
, EXCP_RI
);
19446 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
19449 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
19452 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
19455 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
19458 MIPS_INVAL("MSA instruction");
19459 generate_exception_end(ctx
, EXCP_RI
);
19463 tcg_temp_free_i32(twd
);
19464 tcg_temp_free_i32(tws
);
19465 tcg_temp_free_i32(twt
);
19466 tcg_temp_free_i32(tdf
);
19469 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19471 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19472 (op & (0xf << 17)))
19473 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19474 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19475 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19476 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
19477 TCGv_i32 twd
= tcg_const_i32(wd
);
19478 TCGv_i32 tws
= tcg_const_i32(ws
);
19479 TCGv_i32 twt
= tcg_const_i32(wt
);
19480 /* adjust df value for floating-point instruction */
19481 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
19483 switch (MASK_MSA_2RF(ctx
->opcode
)) {
19484 case OPC_FCLASS_df
:
19485 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
19487 case OPC_FTRUNC_S_df
:
19488 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
19490 case OPC_FTRUNC_U_df
:
19491 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
19494 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19496 case OPC_FRSQRT_df
:
19497 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19500 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19503 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19506 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19508 case OPC_FEXUPL_df
:
19509 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19511 case OPC_FEXUPR_df
:
19512 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19515 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19518 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19520 case OPC_FTINT_S_df
:
19521 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19523 case OPC_FTINT_U_df
:
19524 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19526 case OPC_FFINT_S_df
:
19527 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19529 case OPC_FFINT_U_df
:
19530 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19534 tcg_temp_free_i32(twd
);
19535 tcg_temp_free_i32(tws
);
19536 tcg_temp_free_i32(twt
);
19537 tcg_temp_free_i32(tdf
);
19540 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19542 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19543 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19544 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19545 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19546 TCGv_i32 twd
= tcg_const_i32(wd
);
19547 TCGv_i32 tws
= tcg_const_i32(ws
);
19548 TCGv_i32 twt
= tcg_const_i32(wt
);
19550 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19552 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19555 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19558 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19561 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19564 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19567 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19570 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19573 MIPS_INVAL("MSA instruction");
19574 generate_exception_end(ctx
, EXCP_RI
);
19578 tcg_temp_free_i32(twd
);
19579 tcg_temp_free_i32(tws
);
19580 tcg_temp_free_i32(twt
);
19583 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19585 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19593 gen_msa_vec_v(env
, ctx
);
19596 gen_msa_2r(env
, ctx
);
19599 gen_msa_2rf(env
, ctx
);
19602 MIPS_INVAL("MSA instruction");
19603 generate_exception_end(ctx
, EXCP_RI
);
19608 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19610 uint32_t opcode
= ctx
->opcode
;
19611 check_insn(ctx
, ASE_MSA
);
19612 check_msa_access(ctx
);
19614 switch (MASK_MSA_MINOR(opcode
)) {
19615 case OPC_MSA_I8_00
:
19616 case OPC_MSA_I8_01
:
19617 case OPC_MSA_I8_02
:
19618 gen_msa_i8(env
, ctx
);
19620 case OPC_MSA_I5_06
:
19621 case OPC_MSA_I5_07
:
19622 gen_msa_i5(env
, ctx
);
19624 case OPC_MSA_BIT_09
:
19625 case OPC_MSA_BIT_0A
:
19626 gen_msa_bit(env
, ctx
);
19628 case OPC_MSA_3R_0D
:
19629 case OPC_MSA_3R_0E
:
19630 case OPC_MSA_3R_0F
:
19631 case OPC_MSA_3R_10
:
19632 case OPC_MSA_3R_11
:
19633 case OPC_MSA_3R_12
:
19634 case OPC_MSA_3R_13
:
19635 case OPC_MSA_3R_14
:
19636 case OPC_MSA_3R_15
:
19637 gen_msa_3r(env
, ctx
);
19640 gen_msa_elm(env
, ctx
);
19642 case OPC_MSA_3RF_1A
:
19643 case OPC_MSA_3RF_1B
:
19644 case OPC_MSA_3RF_1C
:
19645 gen_msa_3rf(env
, ctx
);
19648 gen_msa_vec(env
, ctx
);
19659 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19660 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19661 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19662 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19664 TCGv_i32 twd
= tcg_const_i32(wd
);
19665 TCGv taddr
= tcg_temp_new();
19666 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19668 switch (MASK_MSA_MINOR(opcode
)) {
19670 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19673 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19676 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19679 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19682 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19685 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19688 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19691 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19695 tcg_temp_free_i32(twd
);
19696 tcg_temp_free(taddr
);
19700 MIPS_INVAL("MSA instruction");
19701 generate_exception_end(ctx
, EXCP_RI
);
19707 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19710 int rs
, rt
, rd
, sa
;
19714 /* make sure instructions are on a word boundary */
19715 if (ctx
->base
.pc_next
& 0x3) {
19716 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
19717 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19721 /* Handle blikely not taken case */
19722 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19723 TCGLabel
*l1
= gen_new_label();
19725 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19726 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19727 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
19731 op
= MASK_OP_MAJOR(ctx
->opcode
);
19732 rs
= (ctx
->opcode
>> 21) & 0x1f;
19733 rt
= (ctx
->opcode
>> 16) & 0x1f;
19734 rd
= (ctx
->opcode
>> 11) & 0x1f;
19735 sa
= (ctx
->opcode
>> 6) & 0x1f;
19736 imm
= (int16_t)ctx
->opcode
;
19739 decode_opc_special(env
, ctx
);
19742 decode_opc_special2_legacy(env
, ctx
);
19745 decode_opc_special3(env
, ctx
);
19748 op1
= MASK_REGIMM(ctx
->opcode
);
19750 case OPC_BLTZL
: /* REGIMM branches */
19754 check_insn(ctx
, ISA_MIPS2
);
19755 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19759 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19763 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19765 /* OPC_NAL, OPC_BAL */
19766 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19768 generate_exception_end(ctx
, EXCP_RI
);
19771 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19774 case OPC_TGEI
: /* REGIMM traps */
19781 check_insn(ctx
, ISA_MIPS2
);
19782 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19783 gen_trap(ctx
, op1
, rs
, -1, imm
);
19786 check_insn(ctx
, ISA_MIPS32R6
);
19787 generate_exception_end(ctx
, EXCP_RI
);
19790 check_insn(ctx
, ISA_MIPS32R2
);
19791 /* Break the TB to be able to sync copied instructions
19793 ctx
->base
.is_jmp
= DISAS_STOP
;
19795 case OPC_BPOSGE32
: /* MIPS DSP branch */
19796 #if defined(TARGET_MIPS64)
19800 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19802 #if defined(TARGET_MIPS64)
19804 check_insn(ctx
, ISA_MIPS32R6
);
19805 check_mips_64(ctx
);
19807 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19811 check_insn(ctx
, ISA_MIPS32R6
);
19812 check_mips_64(ctx
);
19814 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19818 default: /* Invalid */
19819 MIPS_INVAL("regimm");
19820 generate_exception_end(ctx
, EXCP_RI
);
19825 check_cp0_enabled(ctx
);
19826 op1
= MASK_CP0(ctx
->opcode
);
19834 #if defined(TARGET_MIPS64)
19838 #ifndef CONFIG_USER_ONLY
19839 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19840 #endif /* !CONFIG_USER_ONLY */
19858 #ifndef CONFIG_USER_ONLY
19859 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19860 #endif /* !CONFIG_USER_ONLY */
19863 #ifndef CONFIG_USER_ONLY
19866 TCGv t0
= tcg_temp_new();
19868 op2
= MASK_MFMC0(ctx
->opcode
);
19871 check_insn(ctx
, ASE_MT
);
19872 gen_helper_dmt(t0
);
19873 gen_store_gpr(t0
, rt
);
19876 check_insn(ctx
, ASE_MT
);
19877 gen_helper_emt(t0
);
19878 gen_store_gpr(t0
, rt
);
19881 check_insn(ctx
, ASE_MT
);
19882 gen_helper_dvpe(t0
, cpu_env
);
19883 gen_store_gpr(t0
, rt
);
19886 check_insn(ctx
, ASE_MT
);
19887 gen_helper_evpe(t0
, cpu_env
);
19888 gen_store_gpr(t0
, rt
);
19891 check_insn(ctx
, ISA_MIPS32R6
);
19893 gen_helper_dvp(t0
, cpu_env
);
19894 gen_store_gpr(t0
, rt
);
19898 check_insn(ctx
, ISA_MIPS32R6
);
19900 gen_helper_evp(t0
, cpu_env
);
19901 gen_store_gpr(t0
, rt
);
19905 check_insn(ctx
, ISA_MIPS32R2
);
19906 save_cpu_state(ctx
, 1);
19907 gen_helper_di(t0
, cpu_env
);
19908 gen_store_gpr(t0
, rt
);
19909 /* Stop translation as we may have switched
19910 the execution mode. */
19911 ctx
->base
.is_jmp
= DISAS_STOP
;
19914 check_insn(ctx
, ISA_MIPS32R2
);
19915 save_cpu_state(ctx
, 1);
19916 gen_helper_ei(t0
, cpu_env
);
19917 gen_store_gpr(t0
, rt
);
19918 /* DISAS_STOP isn't sufficient, we need to ensure we break
19919 out of translated code to check for pending interrupts */
19920 gen_save_pc(ctx
->base
.pc_next
+ 4);
19921 ctx
->base
.is_jmp
= DISAS_EXIT
;
19923 default: /* Invalid */
19924 MIPS_INVAL("mfmc0");
19925 generate_exception_end(ctx
, EXCP_RI
);
19930 #endif /* !CONFIG_USER_ONLY */
19933 check_insn(ctx
, ISA_MIPS32R2
);
19934 gen_load_srsgpr(rt
, rd
);
19937 check_insn(ctx
, ISA_MIPS32R2
);
19938 gen_store_srsgpr(rt
, rd
);
19942 generate_exception_end(ctx
, EXCP_RI
);
19946 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19947 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19948 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19949 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19952 /* Arithmetic with immediate opcode */
19953 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19957 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19959 case OPC_SLTI
: /* Set on less than with immediate opcode */
19961 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19963 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19964 case OPC_LUI
: /* OPC_AUI */
19967 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19969 case OPC_J
: /* Jump */
19971 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19972 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19975 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19976 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19978 generate_exception_end(ctx
, EXCP_RI
);
19981 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19982 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19985 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19988 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19989 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19991 generate_exception_end(ctx
, EXCP_RI
);
19994 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19995 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19998 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
20001 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
20004 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
20006 check_insn(ctx
, ISA_MIPS32R6
);
20007 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
20008 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20011 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
20014 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
20016 check_insn(ctx
, ISA_MIPS32R6
);
20017 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
20018 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20023 check_insn(ctx
, ISA_MIPS2
);
20024 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20028 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
20030 case OPC_LL
: /* Load and stores */
20031 check_insn(ctx
, ISA_MIPS2
);
20035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20043 gen_ld(ctx
, op
, rt
, rs
, imm
);
20047 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20052 gen_st(ctx
, op
, rt
, rs
, imm
);
20055 check_insn(ctx
, ISA_MIPS2
);
20056 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20057 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
20060 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20061 check_cp0_enabled(ctx
);
20062 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
20063 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
20064 gen_cache_operation(ctx
, rt
, rs
, imm
);
20066 /* Treat as NOP. */
20069 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20070 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
20071 /* Treat as NOP. */
20074 /* Floating point (COP1). */
20079 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
20083 op1
= MASK_CP1(ctx
->opcode
);
20088 check_cp1_enabled(ctx
);
20089 check_insn(ctx
, ISA_MIPS32R2
);
20095 check_cp1_enabled(ctx
);
20096 gen_cp1(ctx
, op1
, rt
, rd
);
20098 #if defined(TARGET_MIPS64)
20101 check_cp1_enabled(ctx
);
20102 check_insn(ctx
, ISA_MIPS3
);
20103 check_mips_64(ctx
);
20104 gen_cp1(ctx
, op1
, rt
, rd
);
20107 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
20108 check_cp1_enabled(ctx
);
20109 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20111 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
20116 check_insn(ctx
, ASE_MIPS3D
);
20117 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
20118 (rt
>> 2) & 0x7, imm
<< 2);
20122 check_cp1_enabled(ctx
);
20123 check_insn(ctx
, ISA_MIPS32R6
);
20124 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
20128 check_cp1_enabled(ctx
);
20129 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20131 check_insn(ctx
, ASE_MIPS3D
);
20134 check_cp1_enabled(ctx
);
20135 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20136 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
20137 (rt
>> 2) & 0x7, imm
<< 2);
20144 check_cp1_enabled(ctx
);
20145 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
20151 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
20152 check_cp1_enabled(ctx
);
20153 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20155 case R6_OPC_CMP_AF_S
:
20156 case R6_OPC_CMP_UN_S
:
20157 case R6_OPC_CMP_EQ_S
:
20158 case R6_OPC_CMP_UEQ_S
:
20159 case R6_OPC_CMP_LT_S
:
20160 case R6_OPC_CMP_ULT_S
:
20161 case R6_OPC_CMP_LE_S
:
20162 case R6_OPC_CMP_ULE_S
:
20163 case R6_OPC_CMP_SAF_S
:
20164 case R6_OPC_CMP_SUN_S
:
20165 case R6_OPC_CMP_SEQ_S
:
20166 case R6_OPC_CMP_SEUQ_S
:
20167 case R6_OPC_CMP_SLT_S
:
20168 case R6_OPC_CMP_SULT_S
:
20169 case R6_OPC_CMP_SLE_S
:
20170 case R6_OPC_CMP_SULE_S
:
20171 case R6_OPC_CMP_OR_S
:
20172 case R6_OPC_CMP_UNE_S
:
20173 case R6_OPC_CMP_NE_S
:
20174 case R6_OPC_CMP_SOR_S
:
20175 case R6_OPC_CMP_SUNE_S
:
20176 case R6_OPC_CMP_SNE_S
:
20177 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
20179 case R6_OPC_CMP_AF_D
:
20180 case R6_OPC_CMP_UN_D
:
20181 case R6_OPC_CMP_EQ_D
:
20182 case R6_OPC_CMP_UEQ_D
:
20183 case R6_OPC_CMP_LT_D
:
20184 case R6_OPC_CMP_ULT_D
:
20185 case R6_OPC_CMP_LE_D
:
20186 case R6_OPC_CMP_ULE_D
:
20187 case R6_OPC_CMP_SAF_D
:
20188 case R6_OPC_CMP_SUN_D
:
20189 case R6_OPC_CMP_SEQ_D
:
20190 case R6_OPC_CMP_SEUQ_D
:
20191 case R6_OPC_CMP_SLT_D
:
20192 case R6_OPC_CMP_SULT_D
:
20193 case R6_OPC_CMP_SLE_D
:
20194 case R6_OPC_CMP_SULE_D
:
20195 case R6_OPC_CMP_OR_D
:
20196 case R6_OPC_CMP_UNE_D
:
20197 case R6_OPC_CMP_NE_D
:
20198 case R6_OPC_CMP_SOR_D
:
20199 case R6_OPC_CMP_SUNE_D
:
20200 case R6_OPC_CMP_SNE_D
:
20201 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
20204 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
20205 rt
, rd
, sa
, (imm
>> 8) & 0x7);
20210 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
20225 check_insn(ctx
, ASE_MSA
);
20226 gen_msa_branch(env
, ctx
, op1
);
20230 generate_exception_end(ctx
, EXCP_RI
);
20235 /* Compact branches [R6] and COP2 [non-R6] */
20236 case OPC_BC
: /* OPC_LWC2 */
20237 case OPC_BALC
: /* OPC_SWC2 */
20238 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20239 /* OPC_BC, OPC_BALC */
20240 gen_compute_compact_branch(ctx
, op
, 0, 0,
20241 sextract32(ctx
->opcode
<< 2, 0, 28));
20243 /* OPC_LWC2, OPC_SWC2 */
20244 /* COP2: Not implemented. */
20245 generate_exception_err(ctx
, EXCP_CpU
, 2);
20248 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
20249 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
20250 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20252 /* OPC_BEQZC, OPC_BNEZC */
20253 gen_compute_compact_branch(ctx
, op
, rs
, 0,
20254 sextract32(ctx
->opcode
<< 2, 0, 23));
20256 /* OPC_JIC, OPC_JIALC */
20257 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
20260 /* OPC_LWC2, OPC_SWC2 */
20261 /* COP2: Not implemented. */
20262 generate_exception_err(ctx
, EXCP_CpU
, 2);
20266 check_insn(ctx
, INSN_LOONGSON2F
);
20267 /* Note that these instructions use different fields. */
20268 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
20272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20273 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20274 check_cp1_enabled(ctx
);
20275 op1
= MASK_CP3(ctx
->opcode
);
20279 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20285 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20286 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
20289 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20290 /* Treat as NOP. */
20293 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20307 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20308 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
20312 generate_exception_end(ctx
, EXCP_RI
);
20316 generate_exception_err(ctx
, EXCP_CpU
, 1);
20320 #if defined(TARGET_MIPS64)
20321 /* MIPS64 opcodes */
20325 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20329 check_insn(ctx
, ISA_MIPS3
);
20330 check_mips_64(ctx
);
20331 gen_ld(ctx
, op
, rt
, rs
, imm
);
20335 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20338 check_insn(ctx
, ISA_MIPS3
);
20339 check_mips_64(ctx
);
20340 gen_st(ctx
, op
, rt
, rs
, imm
);
20343 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20344 check_insn(ctx
, ISA_MIPS3
);
20345 check_mips_64(ctx
);
20346 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
20348 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
20349 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20350 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
20351 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20354 check_insn(ctx
, ISA_MIPS3
);
20355 check_mips_64(ctx
);
20356 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20360 check_insn(ctx
, ISA_MIPS3
);
20361 check_mips_64(ctx
);
20362 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20365 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
20366 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20367 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20369 MIPS_INVAL("major opcode");
20370 generate_exception_end(ctx
, EXCP_RI
);
20374 case OPC_DAUI
: /* OPC_JALX */
20375 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20376 #if defined(TARGET_MIPS64)
20378 check_mips_64(ctx
);
20380 generate_exception(ctx
, EXCP_RI
);
20381 } else if (rt
!= 0) {
20382 TCGv t0
= tcg_temp_new();
20383 gen_load_gpr(t0
, rs
);
20384 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
20388 generate_exception_end(ctx
, EXCP_RI
);
20389 MIPS_INVAL("major opcode");
20393 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
20394 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
20395 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
20398 case OPC_MSA
: /* OPC_MDMX */
20399 /* MDMX: Not implemented. */
20403 check_insn(ctx
, ISA_MIPS32R6
);
20404 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
20406 default: /* Invalid */
20407 MIPS_INVAL("major opcode");
20408 generate_exception_end(ctx
, EXCP_RI
);
/*
 * TranslatorOps hook: set up the per-TB DisasContext before translation.
 *
 * Snapshots the CPU configuration (CP0 Config1/3/4/5 feature bits, FPU
 * FCR0/FCR31 mode bits, PAMask, LLAddr shift) into the DisasContext so the
 * decoder never has to touch env mid-TB, and restores the delay-slot state
 * carried in tb->flags.
 * NOTE(review): source text was mangled by extraction; dropped structural
 * lines ({, }, #else, #endif, ctx->btarget = 0) restored — verify against tree.
 */
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    /* -1 == "no PC saved yet"; forces the first gen_save_pc. */
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->btarget = 0;
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    /* Paired-single is an FCR0 capability, but Loongson-2E/2F have it too. */
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    /* R6 mandates hardware support for unaligned accesses. */
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}
/*
 * TranslatorOps hook: called once at the start of each TB.
 * MIPS needs no per-TB prologue work, so this is intentionally empty.
 */
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}
/*
 * TranslatorOps hook: emit the insn_start marker for each guest instruction.
 * Records PC, the branch-state bits of hflags, and the branch target so
 * restore_state_to_opc() can rebuild the delay-slot state on exception.
 * NOTE(review): third tcg_gen_insn_start argument (ctx->btarget) was dropped
 * by the extraction and restored here — confirm against the tree.
 */
static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}
/*
 * TranslatorOps hook: a breakpoint was hit at the current PC.
 * Flush CPU state, raise the debug exception, and stop translation.
 * Returns true so the translator loop ends this TB here.
 */
static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}
/*
 * TranslatorOps hook: fetch and translate one guest instruction.
 *
 * Dispatches to the 32-bit MIPS, microMIPS, or MIPS16 decoder based on the
 * M16 hflag and the CPU's ISA flags, then handles delay-slot/forbidden-slot
 * branch emission and decides whether the TB must end (page crossing or
 * single-stepping).
 * NOTE(review): declarations of insn_bytes/is_slot and the insn_bytes = 4
 * assignment were dropped by the extraction and restored — verify.
 */
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    /* Are we currently translating the slot of a branch? */
    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        /* 16-bit mode requested but no 16-bit ISA available. */
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /* force to generate branch as there is neither delay nor
               forbidden slot */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /* Force to generate branch as microMIPS R6 doesn't restrict
               branches in the forbidden slot. */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /* Execute a branch and its delay slot as a single instruction.
       This is what GDB expects and is consistent with what the
       hardware does (e.g. if a delay slot instruction faults, the
       reported PC is the PC of the branch). */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    /* End the TB rather than crossing a guest page boundary. */
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
/*
 * TranslatorOps hook: emit the TB epilogue according to why translation
 * stopped. Single-step raises the debug exception; otherwise the exit kind
 * in base.is_jmp selects goto_tb chaining, an indirect jump lookup, or a
 * plain exit.
 * NOTE(review): switch labels/breaks partially lost in extraction; the
 * DISAS_NEXT/DISAS_TOO_MANY grouping was restored — verify against tree.
 */
static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        /* DISAS_EXIT means state is already up to date — don't save PC. */
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx->base.is_jmp) {
        case DISAS_STOP:
            /* Target unknown at translation time: look it up at run time. */
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
            break;
        case DISAS_NEXT:
        case DISAS_TOO_MANY:
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            break;
        case DISAS_EXIT:
            tcg_gen_exit_tb(NULL, 0);
            break;
        case DISAS_NORETURN:
            /* An exception was already raised; nothing to emit. */
            break;
        default:
            g_assert_not_reached();
        }
    }
}
20572 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
20574 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
20575 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
/* Hook table consumed by the generic translator_loop() for the MIPS target. */
static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};
/*
 * Entry point called by the core: translate one TB by running the generic
 * translator loop over the MIPS hook table. The DisasContext lives on the
 * stack; translator_loop fills in ctx.base.
 */
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb);
}
/*
 * Dump FPU state (FCR0/FCR31/exception flags plus all 32 FP registers) for
 * the monitor "info registers" output. In FR=0 (32-bit FPU) mode two
 * adjacent even/odd registers are pasted together into one 64-bit view and
 * the loop steps by 2; in FR=1 mode each register is printed individually.
 * NOTE(review): macro continuation backslashes and some dropped lines were
 * reconstructed from the visible text — verify against the tree.
 */
static void fpu_dump_state(CPUMIPSState *env, FILE *f,
                           fprintf_function fpu_fprintf, int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
/*
 * Dump CPU state for the monitor: PC/HI/LO and branch state, the 32 GPRs
 * (four per line), key CP0 registers, and — when the FPU is enabled and
 * CPU_DUMP_FPU was requested — the FPU state.
 * NOTE(review): the (i & 3) line-grouping conditionals were dropped by the
 * extraction and restored; format-string spacing reconstructed — verify.
 */
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            cpu_fprintf(f, "GPR%02d:", i);
        }
        cpu_fprintf(f, " %s " TARGET_FMT_lx,
                    regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            cpu_fprintf(f, "\n");
        }
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, cpu_fprintf, flags);
    }
}
/*
 * One-time TCG setup: create the global TCG values backing the guest
 * register file. GPR 0 is hard-wired to zero, so cpu_gpr[0] is left NULL.
 * The scalar FPU registers alias the low halves of the MSA vector
 * registers, so only the MSA globals are allocated and fpu_f64[] points
 * into msa_wr_d[].
 * NOTE(review): several dropped lines (int i; cpu_gpr[0] = NULL; register
 * name arguments regnames/regnames_HI/regnames_LO) restored — verify.
 */
void mips_tcg_init(void)
{
    int i;

    cpu_gpr[0] = NULL;
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState,
                                              active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_fpu.fcr31),
                                       "fcr31");
}
20719 #include "translate_init.inc.c"
/*
 * Realize-time environment setup for a MIPS CPU: install the architected
 * reset exception base (0xBFC00000, sign-extended for 64-bit targets) and
 * initialize the MMU (system emulation only), FPU, and MVP state from the
 * selected CPU model definition.
 */
void cpu_mips_realize_env(CPUMIPSState *env)
{
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}
20732 bool cpu_supports_cps_smp(const char *cpu_type
)
20734 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20735 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
20738 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
20740 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20741 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
20744 void cpu_set_exception_base(int vp_index
, target_ulong address
)
20746 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20747 vp
->env
.exception_base
= address
;
/*
 * Reset the CPU to its architectural power-on state.
 *
 * Copies the CP0/CP1 defaults from the selected cpu_model, then applies
 * the user-mode (CONFIG_USER_ONLY) or system-mode reset behavior: user
 * mode fakes a kernel that has granted FPU/DSP access, while system mode
 * sets up ErrorEPC, EBase, TLB, watchpoints, MT thread state, and legacy
 * segmentation control.
 * NOTE(review): the extraction dropped many structural lines (#endif,
 * else branches, declarations, the ternary operands of the ASID mask);
 * they were restored from the visible structure — verify against the tree.
 */
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        /* MIPS III and up: the xkphys/xkseg quadrants are addressable. */
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    /* User mode: pretend the kernel already granted everything needed. */
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode.  */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is
       writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= (int32_t)0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    /* 10-bit ASIDs when Config4.AE is set, else the classic 8-bit ASID.
       NOTE(review): ternary operands were dropped by extraction —
       restored as 0x3ff : 0xff, confirm. */
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active.  */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 = (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 = (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                       (2 << CP0SC_C);
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 = (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                       (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if (env->CP0_Config3 & (1 << CP0C3_ISA)) {
        /* microMIPS on reset when Config3.ISA == {1, 3} */
        env->hflags |= MIPS_HFLAG_M16;
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
20952 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20953 target_ulong
*data
)
20955 env
->active_tc
.PC
= data
[0];
20956 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20957 env
->hflags
|= data
[1];
20958 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20959 case MIPS_HFLAG_BR
:
20961 case MIPS_HFLAG_BC
:
20962 case MIPS_HFLAG_BL
:
20964 env
->btarget
= data
[2];