/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"

#define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))
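/*
 * The major opcode occupies instruction bits 31..26, so MASK_OP_MAJOR()
 * keeps only that field; the enums below OR the remaining encoding bits
 * back in so each instruction can be matched with a single switch value.
 */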
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
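/*
 * Several of the Release 6 compact-branch encodings above share a major
 * opcode (e.g. OPC_BLEZALC / OPC_BGEZALC / OPC_BGEUC); the decoder tells
 * them apart later by inspecting the rs/rt register fields.
 */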
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};

/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};

/* Special2 opcodes */
#define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,
    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,
    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,
    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    OPC_LWLE = 0x19 | OPC_SPECIAL3,
    OPC_LWRE = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE = 0x1B | OPC_SPECIAL3,
    OPC_SBE = 0x1C | OPC_SPECIAL3,
    OPC_SHE = 0x1D | OPC_SPECIAL3,
    OPC_SCE = 0x1E | OPC_SPECIAL3,
    OPC_SWE = 0x1F | OPC_SPECIAL3,
    OPC_SWLE = 0x21 | OPC_SPECIAL3,
    OPC_SWRE = 0x22 | OPC_SPECIAL3,
    OPC_PREFE = 0x23 | OPC_SPECIAL3,
    OPC_LBUE = 0x28 | OPC_SPECIAL3,
    OPC_LHUE = 0x29 | OPC_SPECIAL3,
    OPC_LBE = 0x2C | OPC_SPECIAL3,
    OPC_LHE = 0x2D | OPC_SPECIAL3,
    OPC_LLE = 0x2E | OPC_SPECIAL3,
    OPC_LWE = 0x2F | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};

#define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
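/*
 * LBUX/LHX/LWX/LDX below are the DSP ASE indexed loads: the effective
 * address is base register + index register rather than base + offset.
 */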
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};

#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};

#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};

#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};

#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};

#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};

#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};

#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};

#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};

#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};

#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};

#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};

#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};

#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};

#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};

#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};

#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};

#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};

/* Coprocessor 0 (rs field) */
#define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_1 = (0x11 << 21) | OPC_CP0,
    OPC_C0_2 = (0x12 << 21) | OPC_CP0,
    OPC_C0_3 = (0x13 << 21) | OPC_CP0,
    OPC_C0_4 = (0x14 << 21) | OPC_CP0,
    OPC_C0_5 = (0x15 << 21) | OPC_CP0,
    OPC_C0_6 = (0x16 << 21) | OPC_CP0,
    OPC_C0_7 = (0x17 << 21) | OPC_CP0,
    OPC_C0_8 = (0x18 << 21) | OPC_CP0,
    OPC_C0_9 = (0x19 << 21) | OPC_CP0,
    OPC_C0_A = (0x1A << 21) | OPC_CP0,
    OPC_C0_B = (0x1B << 21) | OPC_CP0,
    OPC_C0_C = (0x1C << 21) | OPC_CP0,
    OPC_C0_D = (0x1D << 21) | OPC_CP0,
    OPC_C0_E = (0x1E << 21) | OPC_CP0,
    OPC_C0_F = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};

/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
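/*
 * For BC1 the two bits masked in above (instruction bits 17..16) select
 * the branch sense and the "likely" form: BC1F, BC1T, BC1FL, BC1TL.
 */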
enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};

#define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};

#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};

#define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};

#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
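/*
 * Each 128-bit MSA register is modelled as two 64-bit halves, hence the
 * 64 entries in msa_wr_d[] above (named "wN.d0"/"wN.d1" in msaregnames[]
 * further down).
 */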
#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
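/*
 * The macros above wrap constant immediates into a temporary TCGv_i32 so
 * they can be passed to helpers that take cpu_env plus a mix of TCGv and
 * immediate operands; the temporary is freed right after the call.
 */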
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong saved_pc;
    target_ulong page_start;
    uint32_t opcode;
    int32_t CP0_Config1;
    int32_t CP0_Config3;
    int32_t CP0_Config5;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
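/*
 * DISAS_STOP / DISAS_EXIT are target-specific values for the is_jmp field
 * of DisasContextBase (built on the generic DISAS_TARGET_* codes), used to
 * tell the translator loop how a translation block ended.
 */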
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_tl(t, 0);
    } else {
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
    }
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_tl(cpu_gpr[reg], t);
    }
}
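
/*
 * Illustrative note: $zero is hard-wired, so gen_load_gpr materializes an
 * immediate 0 rather than reading cpu_gpr[0], and gen_store_gpr silently
 * drops writes to register 0.  Callers therefore never special-case r0; a
 * plain register-to-register move can be emitted as
 *
 *     gen_load_gpr(t0, rs);
 *     gen_store_gpr(t0, rd);
 *
 * and stays correct for any combination of rs/rd, including r0.
 */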
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0) {
        tcg_gen_movi_tl(t0, 0);
    } else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
        gen_save_pc(ctx->base.pc_next);
        ctx->saved_pc = ctx->base.pc_next;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
static inline int get_fp_bit (int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
)
1739 /* Addresses computation */
1740 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1742 tcg_gen_add_tl(ret
, arg0
, arg1
);
1744 #if defined(TARGET_MIPS64)
1745 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1746 tcg_gen_ext32s_i64(ret
, ret
);
1751 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
1754 tcg_gen_addi_tl(ret
, base
, ofs
);
1756 #if defined(TARGET_MIPS64)
1757 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1758 tcg_gen_ext32s_i64(ret
, ret
);
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
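
/*
 * Example (illustrative): with Status.FR = 0, a double-precision operation
 * that names an odd register, e.g. fd = 5, is treated here as a reserved
 * instruction, because the 64-bit value would occupy the f4/f5 pair and only
 * the even number may name it.  A caller can validate all operands at once:
 *
 *     check_cp1_registers(ctx, fd | fs | ft);
 *
 * since OR-ing the register numbers preserves bit 0 if any of them is odd.
 */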
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif

/*
 * This code generates a "reserved instruction" exception if the
 * Config5 XNP bit is set.
 */
static inline void check_xnp(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/*
 * This code generates a "reserved instruction" exception if the
 * Config3 MT bit is NOT set.
 */
static inline void check_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

#ifndef CONFIG_USER_ONLY
/*
 * This code generates a "coprocessor unusable" exception if CP0 is not
 * available, and, if that is not the case, generates a "reserved instruction"
 * exception if the Config3 MT bit is NOT set. This is needed for availability
 * control of some of MT ASE instructions.
 */
static inline void check_cp0_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_err(ctx, EXCP_CpU, 0);
    } else {
        if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
            generate_exception_err(ctx, EXCP_RI, 0);
        }
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs. No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break; \
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break; \
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break; \
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break; \
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break; \
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break; \
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break; \
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break; \
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break; \
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break; \
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break; \
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break; \
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);  break; \
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break; \
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);  break; \
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break; \
    default:                                                                  \
        abort();                                                              \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
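
/*
 * The FOP_CONDS instantiations above expand into gen_cmp_d/gen_cmpabs_d,
 * gen_cmp_s/gen_cmpabs_s and gen_cmp_ps/gen_cmpabs_ps, i.e. one emitter per
 * format for the classic c.cond.fmt compares.  The 4-bit condition field n
 * selects among the sixteen predicates (f, un, eq, ueq, olt, ult, ole, ule,
 * sf, ngle, seq, ngl, lt, nge, le, ngt) and the result is recorded under
 * condition code cc by the called helper.
 */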
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break; \
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break; \
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break; \
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break; \
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break; \
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break; \
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break; \
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break; \
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break; \
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break; \
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break; \
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break; \
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break; \
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break; \
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break; \
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break; \
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break; \
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break; \
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break; \
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break; \
    default:                                                                  \
        abort();                                                              \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    TCGLabel *l1 = gen_new_label();                                        \
    TCGLabel *l2 = gen_new_label();                                        \
                                                                           \
    tcg_gen_andi_tl(t0, arg2, almask);                                     \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                            \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));    \
    generate_exception(ctx, EXCP_AdES);                                    \
    gen_set_label(l1);                                                     \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                          \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                      \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));             \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));        \
    generate_exception_end(ctx, EXCP_SC);                                  \
    gen_set_label(l2);                                                     \
    tcg_gen_movi_tl(t0, 0);                                                \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx);                        \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
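
/*
 * Design note on the two OP_ST_ATOMIC variants above: the softmmu build
 * delegates the store-conditional to a per-insn helper, while the user-only
 * build emulates it inline.  The inline path first checks alignment (raising
 * AdES with CP0_BadVAddr set on a misaligned address), then compares the
 * store address against the lladdr recorded by the matching load-linked; on
 * a match it stashes the destination register number and the new value in
 * llreg/llnewval and raises EXCP_SC so the store and the 0/1 result can be
 * completed outside of generated code, and on a mismatch it simply writes 0
 * (failure) to rt.
 */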
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}

static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->base.pc_next;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
2211 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2212 int rt
, int base
, int offset
)
2215 int mem_idx
= ctx
->mem_idx
;
2217 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2218 /* Loongson CPU uses a load to zero register for prefetch.
2219 We emulate it as a NOP. On other CPU we must perform the
2220 actual memory access. */
2224 t0
= tcg_temp_new();
2225 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2228 #if defined(TARGET_MIPS64)
2230 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2231 ctx
->default_tcg_memop_mask
);
2232 gen_store_gpr(t0
, rt
);
2235 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2236 ctx
->default_tcg_memop_mask
);
2237 gen_store_gpr(t0
, rt
);
2241 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2242 gen_store_gpr(t0
, rt
);
2245 t1
= tcg_temp_new();
2246 /* Do a byte access to possibly trigger a page
2247 fault with the unaligned address. */
2248 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2249 tcg_gen_andi_tl(t1
, t0
, 7);
2250 #ifndef TARGET_WORDS_BIGENDIAN
2251 tcg_gen_xori_tl(t1
, t1
, 7);
2253 tcg_gen_shli_tl(t1
, t1
, 3);
2254 tcg_gen_andi_tl(t0
, t0
, ~7);
2255 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2256 tcg_gen_shl_tl(t0
, t0
, t1
);
2257 t2
= tcg_const_tl(-1);
2258 tcg_gen_shl_tl(t2
, t2
, t1
);
2259 gen_load_gpr(t1
, rt
);
2260 tcg_gen_andc_tl(t1
, t1
, t2
);
2262 tcg_gen_or_tl(t0
, t0
, t1
);
2264 gen_store_gpr(t0
, rt
);
2267 t1
= tcg_temp_new();
2268 /* Do a byte access to possibly trigger a page
2269 fault with the unaligned address. */
2270 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2271 tcg_gen_andi_tl(t1
, t0
, 7);
2272 #ifdef TARGET_WORDS_BIGENDIAN
2273 tcg_gen_xori_tl(t1
, t1
, 7);
2275 tcg_gen_shli_tl(t1
, t1
, 3);
2276 tcg_gen_andi_tl(t0
, t0
, ~7);
2277 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2278 tcg_gen_shr_tl(t0
, t0
, t1
);
2279 tcg_gen_xori_tl(t1
, t1
, 63);
2280 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2281 tcg_gen_shl_tl(t2
, t2
, t1
);
2282 gen_load_gpr(t1
, rt
);
2283 tcg_gen_and_tl(t1
, t1
, t2
);
2285 tcg_gen_or_tl(t0
, t0
, t1
);
2287 gen_store_gpr(t0
, rt
);
2290 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2291 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2293 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2294 gen_store_gpr(t0
, rt
);
2298 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2299 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2301 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2302 gen_store_gpr(t0
, rt
);
2305 mem_idx
= MIPS_HFLAG_UM
;
2308 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2309 ctx
->default_tcg_memop_mask
);
2310 gen_store_gpr(t0
, rt
);
2313 mem_idx
= MIPS_HFLAG_UM
;
2316 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2317 ctx
->default_tcg_memop_mask
);
2318 gen_store_gpr(t0
, rt
);
2321 mem_idx
= MIPS_HFLAG_UM
;
2324 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2325 ctx
->default_tcg_memop_mask
);
2326 gen_store_gpr(t0
, rt
);
2329 mem_idx
= MIPS_HFLAG_UM
;
2332 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2333 gen_store_gpr(t0
, rt
);
2336 mem_idx
= MIPS_HFLAG_UM
;
2339 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2340 gen_store_gpr(t0
, rt
);
2343 mem_idx
= MIPS_HFLAG_UM
;
2346 t1
= tcg_temp_new();
2347 /* Do a byte access to possibly trigger a page
2348 fault with the unaligned address. */
2349 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2350 tcg_gen_andi_tl(t1
, t0
, 3);
2351 #ifndef TARGET_WORDS_BIGENDIAN
2352 tcg_gen_xori_tl(t1
, t1
, 3);
2354 tcg_gen_shli_tl(t1
, t1
, 3);
2355 tcg_gen_andi_tl(t0
, t0
, ~3);
2356 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2357 tcg_gen_shl_tl(t0
, t0
, t1
);
2358 t2
= tcg_const_tl(-1);
2359 tcg_gen_shl_tl(t2
, t2
, t1
);
2360 gen_load_gpr(t1
, rt
);
2361 tcg_gen_andc_tl(t1
, t1
, t2
);
2363 tcg_gen_or_tl(t0
, t0
, t1
);
2365 tcg_gen_ext32s_tl(t0
, t0
);
2366 gen_store_gpr(t0
, rt
);
2369 mem_idx
= MIPS_HFLAG_UM
;
2372 t1
= tcg_temp_new();
2373 /* Do a byte access to possibly trigger a page
2374 fault with the unaligned address. */
2375 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2376 tcg_gen_andi_tl(t1
, t0
, 3);
2377 #ifdef TARGET_WORDS_BIGENDIAN
2378 tcg_gen_xori_tl(t1
, t1
, 3);
2380 tcg_gen_shli_tl(t1
, t1
, 3);
2381 tcg_gen_andi_tl(t0
, t0
, ~3);
2382 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2383 tcg_gen_shr_tl(t0
, t0
, t1
);
2384 tcg_gen_xori_tl(t1
, t1
, 31);
2385 t2
= tcg_const_tl(0xfffffffeull
);
2386 tcg_gen_shl_tl(t2
, t2
, t1
);
2387 gen_load_gpr(t1
, rt
);
2388 tcg_gen_and_tl(t1
, t1
, t2
);
2390 tcg_gen_or_tl(t0
, t0
, t1
);
2392 tcg_gen_ext32s_tl(t0
, t0
);
2393 gen_store_gpr(t0
, rt
);
2396 mem_idx
= MIPS_HFLAG_UM
;
2400 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2401 gen_store_gpr(t0
, rt
);
static void gen_llwp(DisasContext *ctx, uint32_t base, int16_t offset,
                     uint32_t reg1, uint32_t reg2)
{
    TCGv taddr = tcg_temp_new();
    TCGv_i64 tval = tcg_temp_new_i64();
    TCGv tmp1 = tcg_temp_new();
    TCGv tmp2 = tcg_temp_new();

    gen_base_offset_addr(ctx, taddr, base, offset);
    tcg_gen_qemu_ld64(tval, taddr, ctx->mem_idx);
#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_extr_i64_tl(tmp2, tmp1, tval);
#else
    tcg_gen_extr_i64_tl(tmp1, tmp2, tval);
#endif
    gen_store_gpr(tmp1, reg1);
    tcg_temp_free(tmp1);
    gen_store_gpr(tmp2, reg2);
    tcg_temp_free(tmp2);
    tcg_gen_st_i64(tval, cpu_env, offsetof(CPUMIPSState, llval_wp));
    tcg_temp_free_i64(tval);
    tcg_gen_st_tl(taddr, cpu_env, offsetof(CPUMIPSState, lladdr));
    tcg_temp_free(taddr);
}
2433 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2434 int base
, int offset
)
2436 TCGv t0
= tcg_temp_new();
2437 TCGv t1
= tcg_temp_new();
2438 int mem_idx
= ctx
->mem_idx
;
2440 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2441 gen_load_gpr(t1
, rt
);
2443 #if defined(TARGET_MIPS64)
2445 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2446 ctx
->default_tcg_memop_mask
);
2449 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2452 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2456 mem_idx
= MIPS_HFLAG_UM
;
2459 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2460 ctx
->default_tcg_memop_mask
);
2463 mem_idx
= MIPS_HFLAG_UM
;
2466 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2467 ctx
->default_tcg_memop_mask
);
2470 mem_idx
= MIPS_HFLAG_UM
;
2473 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2476 mem_idx
= MIPS_HFLAG_UM
;
2479 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2482 mem_idx
= MIPS_HFLAG_UM
;
2485 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2493 /* Store conditional */
2494 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2495 int base
, int16_t offset
)
2498 int mem_idx
= ctx
->mem_idx
;
2500 #ifdef CONFIG_USER_ONLY
2501 t0
= tcg_temp_local_new();
2502 t1
= tcg_temp_local_new();
2504 t0
= tcg_temp_new();
2505 t1
= tcg_temp_new();
2507 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2508 gen_load_gpr(t1
, rt
);
2510 #if defined(TARGET_MIPS64)
2513 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2517 mem_idx
= MIPS_HFLAG_UM
;
2521 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
static void gen_scwp(DisasContext *ctx, uint32_t base, int16_t offset,
                     uint32_t reg1, uint32_t reg2)
{
    TCGv taddr = tcg_temp_local_new();
    TCGv lladdr = tcg_temp_local_new();
    TCGv_i64 tval = tcg_temp_new_i64();
    TCGv_i64 llval = tcg_temp_new_i64();
    TCGv_i64 val = tcg_temp_new_i64();
    TCGv tmp1 = tcg_temp_new();
    TCGv tmp2 = tcg_temp_new();
    TCGLabel *lab_fail = gen_new_label();
    TCGLabel *lab_done = gen_new_label();

    gen_base_offset_addr(ctx, taddr, base, offset);

    tcg_gen_ld_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
    tcg_gen_brcond_tl(TCG_COND_NE, taddr, lladdr, lab_fail);

    gen_load_gpr(tmp1, reg1);
    gen_load_gpr(tmp2, reg2);

#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_concat_tl_i64(tval, tmp2, tmp1);
#else
    tcg_gen_concat_tl_i64(tval, tmp1, tmp2);
#endif

    tcg_gen_ld_i64(llval, cpu_env, offsetof(CPUMIPSState, llval_wp));
    tcg_gen_atomic_cmpxchg_i64(val, taddr, llval, tval,
                               ctx->mem_idx, MO_64);
    if (reg1 != 0) {
        tcg_gen_movi_tl(cpu_gpr[reg1], 1);
    }
    tcg_gen_brcond_i64(TCG_COND_EQ, val, llval, lab_done);

    gen_set_label(lab_fail);

    if (reg1 != 0) {
        tcg_gen_movi_tl(cpu_gpr[reg1], 0);
    }
    gen_set_label(lab_done);
    tcg_gen_movi_tl(lladdr, -1);
    tcg_gen_st_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
}
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          TCGv t0)
{
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_qemu_ld_i32(fp0, t0, ctx->mem_idx, MO_TESL |
                                ctx->default_tcg_memop_mask);
            gen_store_fpr32(ctx, fp0, ft);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, ft);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL |
                                ctx->default_tcg_memop_mask);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
                                ctx->default_tcg_memop_mask);
            gen_store_fpr64(ctx, fp0, ft);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, ft);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
                                ctx->default_tcg_memop_mask);
            tcg_temp_free_i64(fp0);
        }
        break;
    default:
        MIPS_INVAL("flt_ldst");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    TCGv t0 = tcg_temp_new();

    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        switch (op) {
        case OPC_LDC1:
        case OPC_SDC1:
            check_insn(ctx, ISA_MIPS2);
            /* Fallthrough */
        default:
            gen_base_offset_addr(ctx, t0, rs, imm);
            gen_flt_ldst(ctx, op, rt, t0);
        }
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
    tcg_temp_free(t0);
}
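
/*
 * Usage note: the coprocessor-1 load/store front end above first confirms an
 * FPU is configured (Config1.FP) and enabled before any address computation,
 * so LWC1/SWC1/LDC1/SDC1 raise a coprocessor-unusable exception rather than
 * a memory fault when CP1 access is not permitted.
 */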
2645 /* Arithmetic with immediate operand */
2646 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2647 int rt
, int rs
, int imm
)
2649 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2651 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2652 /* If no destination, treat it as a NOP.
2653 For addi, we must generate the overflow exception when needed. */
2659 TCGv t0
= tcg_temp_local_new();
2660 TCGv t1
= tcg_temp_new();
2661 TCGv t2
= tcg_temp_new();
2662 TCGLabel
*l1
= gen_new_label();
2664 gen_load_gpr(t1
, rs
);
2665 tcg_gen_addi_tl(t0
, t1
, uimm
);
2666 tcg_gen_ext32s_tl(t0
, t0
);
2668 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2669 tcg_gen_xori_tl(t2
, t0
, uimm
);
2670 tcg_gen_and_tl(t1
, t1
, t2
);
2672 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2674 /* operands of same sign, result different sign */
2675 generate_exception(ctx
, EXCP_OVERFLOW
);
2677 tcg_gen_ext32s_tl(t0
, t0
);
2678 gen_store_gpr(t0
, rt
);
2684 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2685 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2687 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2690 #if defined(TARGET_MIPS64)
2693 TCGv t0
= tcg_temp_local_new();
2694 TCGv t1
= tcg_temp_new();
2695 TCGv t2
= tcg_temp_new();
2696 TCGLabel
*l1
= gen_new_label();
2698 gen_load_gpr(t1
, rs
);
2699 tcg_gen_addi_tl(t0
, t1
, uimm
);
2701 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2702 tcg_gen_xori_tl(t2
, t0
, uimm
);
2703 tcg_gen_and_tl(t1
, t1
, t2
);
2705 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2707 /* operands of same sign, result different sign */
2708 generate_exception(ctx
, EXCP_OVERFLOW
);
2710 gen_store_gpr(t0
, rt
);
2716 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2718 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2725 /* Logic with immediate operand */
2726 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2727 int rt
, int rs
, int16_t imm
)
2732 /* If no destination, treat it as a NOP. */
2735 uimm
= (uint16_t)imm
;
2738 if (likely(rs
!= 0))
2739 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2741 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2745 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2747 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2750 if (likely(rs
!= 0))
2751 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2753 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2756 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2758 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2759 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2761 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2770 /* Set on less than with immediate operand */
2771 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2772 int rt
, int rs
, int16_t imm
)
2774 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2778 /* If no destination, treat it as a NOP. */
2781 t0
= tcg_temp_new();
2782 gen_load_gpr(t0
, rs
);
2785 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2788 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2794 /* Shifts with immediate operand */
2795 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2796 int rt
, int rs
, int16_t imm
)
2798 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2802 /* If no destination, treat it as a NOP. */
2806 t0
= tcg_temp_new();
2807 gen_load_gpr(t0
, rs
);
2810 tcg_gen_shli_tl(t0
, t0
, uimm
);
2811 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2814 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2818 tcg_gen_ext32u_tl(t0
, t0
);
2819 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2821 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2826 TCGv_i32 t1
= tcg_temp_new_i32();
2828 tcg_gen_trunc_tl_i32(t1
, t0
);
2829 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2830 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2831 tcg_temp_free_i32(t1
);
2833 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2836 #if defined(TARGET_MIPS64)
2838 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2841 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2844 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2848 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2850 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2854 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2857 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2860 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2863 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2871 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2872 int rd
, int rs
, int rt
)
2874 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2875 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2876 /* If no destination, treat it as a NOP.
2877 For add & sub, we must generate the overflow exception when needed. */
2884 TCGv t0
= tcg_temp_local_new();
2885 TCGv t1
= tcg_temp_new();
2886 TCGv t2
= tcg_temp_new();
2887 TCGLabel
*l1
= gen_new_label();
2889 gen_load_gpr(t1
, rs
);
2890 gen_load_gpr(t2
, rt
);
2891 tcg_gen_add_tl(t0
, t1
, t2
);
2892 tcg_gen_ext32s_tl(t0
, t0
);
2893 tcg_gen_xor_tl(t1
, t1
, t2
);
2894 tcg_gen_xor_tl(t2
, t0
, t2
);
2895 tcg_gen_andc_tl(t1
, t2
, t1
);
2897 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2899 /* operands of same sign, result different sign */
2900 generate_exception(ctx
, EXCP_OVERFLOW
);
2902 gen_store_gpr(t0
, rd
);
2907 if (rs
!= 0 && rt
!= 0) {
2908 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2909 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2910 } else if (rs
== 0 && rt
!= 0) {
2911 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2912 } else if (rs
!= 0 && rt
== 0) {
2913 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2915 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2920 TCGv t0
= tcg_temp_local_new();
2921 TCGv t1
= tcg_temp_new();
2922 TCGv t2
= tcg_temp_new();
2923 TCGLabel
*l1
= gen_new_label();
2925 gen_load_gpr(t1
, rs
);
2926 gen_load_gpr(t2
, rt
);
2927 tcg_gen_sub_tl(t0
, t1
, t2
);
2928 tcg_gen_ext32s_tl(t0
, t0
);
2929 tcg_gen_xor_tl(t2
, t1
, t2
);
2930 tcg_gen_xor_tl(t1
, t0
, t1
);
2931 tcg_gen_and_tl(t1
, t1
, t2
);
2933 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2935 /* operands of different sign, first operand and result different sign */
2936 generate_exception(ctx
, EXCP_OVERFLOW
);
2938 gen_store_gpr(t0
, rd
);
2943 if (rs
!= 0 && rt
!= 0) {
2944 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2945 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2946 } else if (rs
== 0 && rt
!= 0) {
2947 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2948 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2949 } else if (rs
!= 0 && rt
== 0) {
2950 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2952 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2955 #if defined(TARGET_MIPS64)
2958 TCGv t0
= tcg_temp_local_new();
2959 TCGv t1
= tcg_temp_new();
2960 TCGv t2
= tcg_temp_new();
2961 TCGLabel
*l1
= gen_new_label();
2963 gen_load_gpr(t1
, rs
);
2964 gen_load_gpr(t2
, rt
);
2965 tcg_gen_add_tl(t0
, t1
, t2
);
2966 tcg_gen_xor_tl(t1
, t1
, t2
);
2967 tcg_gen_xor_tl(t2
, t0
, t2
);
2968 tcg_gen_andc_tl(t1
, t2
, t1
);
2970 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2972 /* operands of same sign, result different sign */
2973 generate_exception(ctx
, EXCP_OVERFLOW
);
2975 gen_store_gpr(t0
, rd
);
2980 if (rs
!= 0 && rt
!= 0) {
2981 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2982 } else if (rs
== 0 && rt
!= 0) {
2983 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2984 } else if (rs
!= 0 && rt
== 0) {
2985 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2987 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2992 TCGv t0
= tcg_temp_local_new();
2993 TCGv t1
= tcg_temp_new();
2994 TCGv t2
= tcg_temp_new();
2995 TCGLabel
*l1
= gen_new_label();
2997 gen_load_gpr(t1
, rs
);
2998 gen_load_gpr(t2
, rt
);
2999 tcg_gen_sub_tl(t0
, t1
, t2
);
3000 tcg_gen_xor_tl(t2
, t1
, t2
);
3001 tcg_gen_xor_tl(t1
, t0
, t1
);
3002 tcg_gen_and_tl(t1
, t1
, t2
);
3004 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3006 /* operands of different sign, first operand and result different sign */
3007 generate_exception(ctx
, EXCP_OVERFLOW
);
3009 gen_store_gpr(t0
, rd
);
3014 if (rs
!= 0 && rt
!= 0) {
3015 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3016 } else if (rs
== 0 && rt
!= 0) {
3017 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3018 } else if (rs
!= 0 && rt
== 0) {
3019 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3021 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3026 if (likely(rs
!= 0 && rt
!= 0)) {
3027 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3028 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3030 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3036 /* Conditional move */
3037 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
3038 int rd
, int rs
, int rt
)
3043 /* If no destination, treat it as a NOP. */
3047 t0
= tcg_temp_new();
3048 gen_load_gpr(t0
, rt
);
3049 t1
= tcg_const_tl(0);
3050 t2
= tcg_temp_new();
3051 gen_load_gpr(t2
, rs
);
3054 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3057 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3060 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3063 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3072 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
3073 int rd
, int rs
, int rt
)
3076 /* If no destination, treat it as a NOP. */
3082 if (likely(rs
!= 0 && rt
!= 0)) {
3083 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3085 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3089 if (rs
!= 0 && rt
!= 0) {
3090 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3091 } else if (rs
== 0 && rt
!= 0) {
3092 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3093 } else if (rs
!= 0 && rt
== 0) {
3094 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3096 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
3100 if (likely(rs
!= 0 && rt
!= 0)) {
3101 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3102 } else if (rs
== 0 && rt
!= 0) {
3103 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3104 } else if (rs
!= 0 && rt
== 0) {
3105 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3107 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3111 if (likely(rs
!= 0 && rt
!= 0)) {
3112 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3113 } else if (rs
== 0 && rt
!= 0) {
3114 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3115 } else if (rs
!= 0 && rt
== 0) {
3116 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3118 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3124 /* Set on lower than */
3125 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
3126 int rd
, int rs
, int rt
)
3131 /* If no destination, treat it as a NOP. */
3135 t0
= tcg_temp_new();
3136 t1
= tcg_temp_new();
3137 gen_load_gpr(t0
, rs
);
3138 gen_load_gpr(t1
, rt
);
3141 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3144 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3152 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3153 int rd
, int rs
, int rt
)
3158 /* If no destination, treat it as a NOP.
3159 For add & sub, we must generate the overflow exception when needed. */
3163 t0
= tcg_temp_new();
3164 t1
= tcg_temp_new();
3165 gen_load_gpr(t0
, rs
);
3166 gen_load_gpr(t1
, rt
);
3169 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3170 tcg_gen_shl_tl(t0
, t1
, t0
);
3171 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3174 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3175 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3178 tcg_gen_ext32u_tl(t1
, t1
);
3179 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3180 tcg_gen_shr_tl(t0
, t1
, t0
);
3181 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3185 TCGv_i32 t2
= tcg_temp_new_i32();
3186 TCGv_i32 t3
= tcg_temp_new_i32();
3188 tcg_gen_trunc_tl_i32(t2
, t0
);
3189 tcg_gen_trunc_tl_i32(t3
, t1
);
3190 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3191 tcg_gen_rotr_i32(t2
, t3
, t2
);
3192 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3193 tcg_temp_free_i32(t2
);
3194 tcg_temp_free_i32(t3
);
3197 #if defined(TARGET_MIPS64)
3199 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3200 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3203 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3204 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3207 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3208 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3211 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3212 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3220 /* Arithmetic on HI/LO registers */
3221 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3223 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3234 #if defined(TARGET_MIPS64)
3236 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3240 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3244 #if defined(TARGET_MIPS64)
3246 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3250 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3255 #if defined(TARGET_MIPS64)
3257 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3261 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3264 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3269 #if defined(TARGET_MIPS64)
3271 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3275 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3278 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3284 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3287 TCGv t0
= tcg_const_tl(addr
);
3288 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3289 gen_store_gpr(t0
, reg
);
3293 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3299 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3302 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3303 addr
= addr_add(ctx
, pc
, offset
);
3304 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3308 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3309 addr
= addr_add(ctx
, pc
, offset
);
3310 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3312 #if defined(TARGET_MIPS64)
3315 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3316 addr
= addr_add(ctx
, pc
, offset
);
3317 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3321 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3324 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3325 addr
= addr_add(ctx
, pc
, offset
);
3326 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3331 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3332 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3333 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3336 #if defined(TARGET_MIPS64)
3337 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3338 case R6_OPC_LDPC
+ (1 << 16):
3339 case R6_OPC_LDPC
+ (2 << 16):
3340 case R6_OPC_LDPC
+ (3 << 16):
3342 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3343 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3344 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3348 MIPS_INVAL("OPC_PCREL");
3349 generate_exception_end(ctx
, EXCP_RI
);
3356 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3365 t0
= tcg_temp_new();
3366 t1
= tcg_temp_new();
3368 gen_load_gpr(t0
, rs
);
3369 gen_load_gpr(t1
, rt
);
3374 TCGv t2
= tcg_temp_new();
3375 TCGv t3
= tcg_temp_new();
3376 tcg_gen_ext32s_tl(t0
, t0
);
3377 tcg_gen_ext32s_tl(t1
, t1
);
3378 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3379 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3380 tcg_gen_and_tl(t2
, t2
, t3
);
3381 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3382 tcg_gen_or_tl(t2
, t2
, t3
);
3383 tcg_gen_movi_tl(t3
, 0);
3384 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3385 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3386 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3393 TCGv t2
= tcg_temp_new();
3394 TCGv t3
= tcg_temp_new();
3395 tcg_gen_ext32s_tl(t0
, t0
);
3396 tcg_gen_ext32s_tl(t1
, t1
);
3397 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3398 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3399 tcg_gen_and_tl(t2
, t2
, t3
);
3400 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3401 tcg_gen_or_tl(t2
, t2
, t3
);
3402 tcg_gen_movi_tl(t3
, 0);
3403 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3404 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3405 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3412 TCGv t2
= tcg_const_tl(0);
3413 TCGv t3
= tcg_const_tl(1);
3414 tcg_gen_ext32u_tl(t0
, t0
);
3415 tcg_gen_ext32u_tl(t1
, t1
);
3416 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3417 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3418 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3425 TCGv t2
= tcg_const_tl(0);
3426 TCGv t3
= tcg_const_tl(1);
3427 tcg_gen_ext32u_tl(t0
, t0
);
3428 tcg_gen_ext32u_tl(t1
, t1
);
3429 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3430 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3431 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3438 TCGv_i32 t2
= tcg_temp_new_i32();
3439 TCGv_i32 t3
= tcg_temp_new_i32();
3440 tcg_gen_trunc_tl_i32(t2
, t0
);
3441 tcg_gen_trunc_tl_i32(t3
, t1
);
3442 tcg_gen_mul_i32(t2
, t2
, t3
);
3443 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3444 tcg_temp_free_i32(t2
);
3445 tcg_temp_free_i32(t3
);
3450 TCGv_i32 t2
= tcg_temp_new_i32();
3451 TCGv_i32 t3
= tcg_temp_new_i32();
3452 tcg_gen_trunc_tl_i32(t2
, t0
);
3453 tcg_gen_trunc_tl_i32(t3
, t1
);
3454 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3455 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3456 tcg_temp_free_i32(t2
);
3457 tcg_temp_free_i32(t3
);
3462 TCGv_i32 t2
= tcg_temp_new_i32();
3463 TCGv_i32 t3
= tcg_temp_new_i32();
3464 tcg_gen_trunc_tl_i32(t2
, t0
);
3465 tcg_gen_trunc_tl_i32(t3
, t1
);
3466 tcg_gen_mul_i32(t2
, t2
, t3
);
3467 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3468 tcg_temp_free_i32(t2
);
3469 tcg_temp_free_i32(t3
);
3474 TCGv_i32 t2
= tcg_temp_new_i32();
3475 TCGv_i32 t3
= tcg_temp_new_i32();
3476 tcg_gen_trunc_tl_i32(t2
, t0
);
3477 tcg_gen_trunc_tl_i32(t3
, t1
);
3478 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3479 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3480 tcg_temp_free_i32(t2
);
3481 tcg_temp_free_i32(t3
);
3484 #if defined(TARGET_MIPS64)
3487 TCGv t2
= tcg_temp_new();
3488 TCGv t3
= tcg_temp_new();
3489 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3490 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3491 tcg_gen_and_tl(t2
, t2
, t3
);
3492 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3493 tcg_gen_or_tl(t2
, t2
, t3
);
3494 tcg_gen_movi_tl(t3
, 0);
3495 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3496 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3503 TCGv t2
= tcg_temp_new();
3504 TCGv t3
= tcg_temp_new();
3505 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3506 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3507 tcg_gen_and_tl(t2
, t2
, t3
);
3508 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3509 tcg_gen_or_tl(t2
, t2
, t3
);
3510 tcg_gen_movi_tl(t3
, 0);
3511 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3512 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3519 TCGv t2
= tcg_const_tl(0);
3520 TCGv t3
= tcg_const_tl(1);
3521 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3522 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3529 TCGv t2
= tcg_const_tl(0);
3530 TCGv t3
= tcg_const_tl(1);
3531 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3532 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3538 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3542 TCGv t2
= tcg_temp_new();
3543 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3548 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3552 TCGv t2
= tcg_temp_new();
3553 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3559 MIPS_INVAL("r6 mul/div");
3560 generate_exception_end(ctx
, EXCP_RI
);
3568 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3569 int acc
, int rs
, int rt
)
3573 t0
= tcg_temp_new();
3574 t1
= tcg_temp_new();
3576 gen_load_gpr(t0
, rs
);
3577 gen_load_gpr(t1
, rt
);
3586 TCGv t2
= tcg_temp_new();
3587 TCGv t3
= tcg_temp_new();
3588 tcg_gen_ext32s_tl(t0
, t0
);
3589 tcg_gen_ext32s_tl(t1
, t1
);
3590 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3591 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3592 tcg_gen_and_tl(t2
, t2
, t3
);
3593 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3594 tcg_gen_or_tl(t2
, t2
, t3
);
3595 tcg_gen_movi_tl(t3
, 0);
3596 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3597 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3598 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3599 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3600 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3607 TCGv t2
= tcg_const_tl(0);
3608 TCGv t3
= tcg_const_tl(1);
3609 tcg_gen_ext32u_tl(t0
, t0
);
3610 tcg_gen_ext32u_tl(t1
, t1
);
3611 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3612 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3613 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3614 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3615 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3622 TCGv_i32 t2
= tcg_temp_new_i32();
3623 TCGv_i32 t3
= tcg_temp_new_i32();
3624 tcg_gen_trunc_tl_i32(t2
, t0
);
3625 tcg_gen_trunc_tl_i32(t3
, t1
);
3626 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3627 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3628 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3629 tcg_temp_free_i32(t2
);
3630 tcg_temp_free_i32(t3
);
3635 TCGv_i32 t2
= tcg_temp_new_i32();
3636 TCGv_i32 t3
= tcg_temp_new_i32();
3637 tcg_gen_trunc_tl_i32(t2
, t0
);
3638 tcg_gen_trunc_tl_i32(t3
, t1
);
3639 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3640 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3641 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3642 tcg_temp_free_i32(t2
);
3643 tcg_temp_free_i32(t3
);
3646 #if defined(TARGET_MIPS64)
3649 TCGv t2
= tcg_temp_new();
3650 TCGv t3
= tcg_temp_new();
3651 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3652 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3653 tcg_gen_and_tl(t2
, t2
, t3
);
3654 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3655 tcg_gen_or_tl(t2
, t2
, t3
);
3656 tcg_gen_movi_tl(t3
, 0);
3657 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3658 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3659 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3666 TCGv t2
= tcg_const_tl(0);
3667 TCGv t3
= tcg_const_tl(1);
3668 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3669 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3670 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3676 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3679 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3684 TCGv_i64 t2
= tcg_temp_new_i64();
3685 TCGv_i64 t3
= tcg_temp_new_i64();
3687 tcg_gen_ext_tl_i64(t2
, t0
);
3688 tcg_gen_ext_tl_i64(t3
, t1
);
3689 tcg_gen_mul_i64(t2
, t2
, t3
);
3690 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3691 tcg_gen_add_i64(t2
, t2
, t3
);
3692 tcg_temp_free_i64(t3
);
3693 gen_move_low32(cpu_LO
[acc
], t2
);
3694 gen_move_high32(cpu_HI
[acc
], t2
);
3695 tcg_temp_free_i64(t2
);
3700 TCGv_i64 t2
= tcg_temp_new_i64();
3701 TCGv_i64 t3
= tcg_temp_new_i64();
3703 tcg_gen_ext32u_tl(t0
, t0
);
3704 tcg_gen_ext32u_tl(t1
, t1
);
3705 tcg_gen_extu_tl_i64(t2
, t0
);
3706 tcg_gen_extu_tl_i64(t3
, t1
);
3707 tcg_gen_mul_i64(t2
, t2
, t3
);
3708 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3709 tcg_gen_add_i64(t2
, t2
, t3
);
3710 tcg_temp_free_i64(t3
);
3711 gen_move_low32(cpu_LO
[acc
], t2
);
3712 gen_move_high32(cpu_HI
[acc
], t2
);
3713 tcg_temp_free_i64(t2
);
3718 TCGv_i64 t2
= tcg_temp_new_i64();
3719 TCGv_i64 t3
= tcg_temp_new_i64();
3721 tcg_gen_ext_tl_i64(t2
, t0
);
3722 tcg_gen_ext_tl_i64(t3
, t1
);
3723 tcg_gen_mul_i64(t2
, t2
, t3
);
3724 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3725 tcg_gen_sub_i64(t2
, t3
, t2
);
3726 tcg_temp_free_i64(t3
);
3727 gen_move_low32(cpu_LO
[acc
], t2
);
3728 gen_move_high32(cpu_HI
[acc
], t2
);
3729 tcg_temp_free_i64(t2
);
3734 TCGv_i64 t2
= tcg_temp_new_i64();
3735 TCGv_i64 t3
= tcg_temp_new_i64();
3737 tcg_gen_ext32u_tl(t0
, t0
);
3738 tcg_gen_ext32u_tl(t1
, t1
);
3739 tcg_gen_extu_tl_i64(t2
, t0
);
3740 tcg_gen_extu_tl_i64(t3
, t1
);
3741 tcg_gen_mul_i64(t2
, t2
, t3
);
3742 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3743 tcg_gen_sub_i64(t2
, t3
, t2
);
3744 tcg_temp_free_i64(t3
);
3745 gen_move_low32(cpu_LO
[acc
], t2
);
3746 gen_move_high32(cpu_HI
[acc
], t2
);
3747 tcg_temp_free_i64(t2
);
3751 MIPS_INVAL("mul/div");
3752 generate_exception_end(ctx
, EXCP_RI
);
3760 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3761 int rd
, int rs
, int rt
)
3763 TCGv t0
= tcg_temp_new();
3764 TCGv t1
= tcg_temp_new();
3766 gen_load_gpr(t0
, rs
);
3767 gen_load_gpr(t1
, rt
);
3770 case OPC_VR54XX_MULS
:
3771 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3773 case OPC_VR54XX_MULSU
:
3774 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3776 case OPC_VR54XX_MACC
:
3777 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3779 case OPC_VR54XX_MACCU
:
3780 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3782 case OPC_VR54XX_MSAC
:
3783 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3785 case OPC_VR54XX_MSACU
:
3786 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3788 case OPC_VR54XX_MULHI
:
3789 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3791 case OPC_VR54XX_MULHIU
:
3792 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3794 case OPC_VR54XX_MULSHI
:
3795 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3797 case OPC_VR54XX_MULSHIU
:
3798 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3800 case OPC_VR54XX_MACCHI
:
3801 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3803 case OPC_VR54XX_MACCHIU
:
3804 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3806 case OPC_VR54XX_MSACHI
:
3807 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3809 case OPC_VR54XX_MSACHIU
:
3810 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3813 MIPS_INVAL("mul vr54xx");
3814 generate_exception_end(ctx
, EXCP_RI
);
3817 gen_store_gpr(t0
, rd
);
3824 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3834 gen_load_gpr(t0
, rs
);
3839 #if defined(TARGET_MIPS64)
3843 tcg_gen_not_tl(t0
, t0
);
3852 tcg_gen_ext32u_tl(t0
, t0
);
3853 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3854 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3856 #if defined(TARGET_MIPS64)
3861 tcg_gen_clzi_i64(t0
, t0
, 64);
3867 /* Godson integer instructions */
3868 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3869 int rd
, int rs
, int rt
)
3881 case OPC_MULTU_G_2E
:
3882 case OPC_MULTU_G_2F
:
3883 #if defined(TARGET_MIPS64)
3884 case OPC_DMULT_G_2E
:
3885 case OPC_DMULT_G_2F
:
3886 case OPC_DMULTU_G_2E
:
3887 case OPC_DMULTU_G_2F
:
3889 t0
= tcg_temp_new();
3890 t1
= tcg_temp_new();
3893 t0
= tcg_temp_local_new();
3894 t1
= tcg_temp_local_new();
3898 gen_load_gpr(t0
, rs
);
3899 gen_load_gpr(t1
, rt
);
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);

            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);

    case OPC_DDIVU_G_2E:
    case OPC_DDIVU_G_2F:
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);

            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);

    case OPC_DMODU_G_2E:
    case OPC_DMODU_G_2F:
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
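        /*
         * Added commentary: the branch ladders above handle the corner
         * cases of the Loongson DIV/MOD group explicitly: a zero divisor
         * yields rd = 0, signed DIV of INT_MIN (or its 64-bit equivalent)
         * by -1 yields the dividend, and the corresponding MOD yields 0,
         * so the host-side TCG division never sees an undefined operand
         * pair.
         */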
/* Loongson multimedia instructions */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
    uint32_t opc, shift_max;

    opc = MASK_LMI(ctx->opcode);
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);
4091 #define LMI_HELPER(UP, LO) \
4092 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
4093 #define LMI_HELPER_1(UP, LO) \
4094 case OPC_##UP: gen_helper_##LO(t0, t0); break
4095 #define LMI_DIRECT(UP, LO, OP) \
4096 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
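/*
 * Added note: LMI_HELPER/LMI_DIRECT only abbreviate the case bodies below.
 * For example, LMI_DIRECT(PADDD, paddd, add) expands to
 *     case OPC_PADDD: tcg_gen_add_i64(t0, t0, t1); break;
 * so simple full-width 64-bit operations are emitted inline while the
 * packed and saturating ones go through C helpers.
 */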
        LMI_HELPER(PADDSH, paddsh);
        LMI_HELPER(PADDUSH, paddush);
        LMI_HELPER(PADDH, paddh);
        LMI_HELPER(PADDW, paddw);
        LMI_HELPER(PADDSB, paddsb);
        LMI_HELPER(PADDUSB, paddusb);
        LMI_HELPER(PADDB, paddb);

        LMI_HELPER(PSUBSH, psubsh);
        LMI_HELPER(PSUBUSH, psubush);
        LMI_HELPER(PSUBH, psubh);
        LMI_HELPER(PSUBW, psubw);
        LMI_HELPER(PSUBSB, psubsb);
        LMI_HELPER(PSUBUSB, psubusb);
        LMI_HELPER(PSUBB, psubb);

        LMI_HELPER(PSHUFH, pshufh);
        LMI_HELPER(PACKSSWH, packsswh);
        LMI_HELPER(PACKSSHB, packsshb);
        LMI_HELPER(PACKUSHB, packushb);

        LMI_HELPER(PUNPCKLHW, punpcklhw);
        LMI_HELPER(PUNPCKHHW, punpckhhw);
        LMI_HELPER(PUNPCKLBH, punpcklbh);
        LMI_HELPER(PUNPCKHBH, punpckhbh);
        LMI_HELPER(PUNPCKLWD, punpcklwd);
        LMI_HELPER(PUNPCKHWD, punpckhwd);

        LMI_HELPER(PAVGH, pavgh);
        LMI_HELPER(PAVGB, pavgb);
        LMI_HELPER(PMAXSH, pmaxsh);
        LMI_HELPER(PMINSH, pminsh);
        LMI_HELPER(PMAXUB, pmaxub);
        LMI_HELPER(PMINUB, pminub);

        LMI_HELPER(PCMPEQW, pcmpeqw);
        LMI_HELPER(PCMPGTW, pcmpgtw);
        LMI_HELPER(PCMPEQH, pcmpeqh);
        LMI_HELPER(PCMPGTH, pcmpgth);
        LMI_HELPER(PCMPEQB, pcmpeqb);
        LMI_HELPER(PCMPGTB, pcmpgtb);

        LMI_HELPER(PSLLW, psllw);
        LMI_HELPER(PSLLH, psllh);
        LMI_HELPER(PSRLW, psrlw);
        LMI_HELPER(PSRLH, psrlh);
        LMI_HELPER(PSRAW, psraw);
        LMI_HELPER(PSRAH, psrah);

        LMI_HELPER(PMULLH, pmullh);
        LMI_HELPER(PMULHH, pmulhh);
        LMI_HELPER(PMULHUH, pmulhuh);
        LMI_HELPER(PMADDHW, pmaddhw);

        LMI_HELPER(PASUBUB, pasubub);
        LMI_HELPER_1(BIADD, biadd);
        LMI_HELPER_1(PMOVMSKB, pmovmskb);

        LMI_DIRECT(PADDD, paddd, add);
        LMI_DIRECT(PSUBD, psubd, sub);
        LMI_DIRECT(XOR_CP2, xor, xor);
        LMI_DIRECT(NOR_CP2, nor, nor);
        LMI_DIRECT(AND_CP2, and, and);
        LMI_DIRECT(OR_CP2, or, or);
        tcg_gen_andc_i64(t0, t1, t0);

        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

            tcg_gen_shl_i64(t0, t0, t1);

            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same.  */
            tcg_gen_sar_i64(t0, t0, t1);

            /* We want to shift in zeros for SRL; zero-extend first.  */
            tcg_gen_ext32u_i64(t0, t0);
            tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
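        /*
         * Added commentary: the setcond/neg/and sequence implements
         * "the result is zero when the shift count is >= shift_max"
         * without a branch: setcond produces 1 for an in-range count,
         * neg turns that into an all-ones mask (or 0), and the final
         * and either keeps or clears the shifted value.
         */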
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
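            /*
             * Added commentary: signed-overflow detection without a wider
             * type: for a sum s = a + b, overflow occurred iff a and b have
             * the same sign while s has the opposite sign.  The xor/xor/andc
             * sequence leaves exactly that condition in the sign bit, which
             * the brcondi on >= 0 then tests before raising EXCP_OVERFLOW.
             */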
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
    /* ??? Document is unclear: Set FCC[CC]. Does that mean the
       FD field is the CC field? */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
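    /*
     * Added note: the always-trap / never-trap cases above cover encodings
     * whose outcome is known at translation time (identical source
     * registers, or a zero register compared against a zero immediate);
     * only genuinely data-dependent traps emit the brcond plus exception
     * sequence.
     */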
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->base.singlestep_enabled)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        gen_save_pc(dest);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        }
        tcg_gen_lookup_and_goto_ptr();
    }
}
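/*
 * Added note: gen_goto_tb() is the usual two-way TB exit helper; callers
 * typically emit it once per branch leg, e.g.
 *     gen_goto_tb(ctx, 0, ctx->btarget);          (taken path)
 *     gen_goto_tb(ctx, 1, ctx->base.pc_next);     (fall-through path)
 * Direct chaining is only legal when the destination stays on the same
 * guest page and single-stepping is off, which is what use_goto_tb()
 * checks; otherwise we fall back to the lookup-and-goto-ptr slow path.
 */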
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->base.pc_next + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->base.pc_next + insn_bytes) & (int32_t)0xF0000000) |
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->base.pc_next + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink],
                        ctx->base.pc_next + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
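    /*
     * Added commentary: for linking branches the return address written to
     * GPR[blink] is the address of the instruction after the delay slot
     * (pc_next + post_delay), with the low bit set when the current ISA
     * mode is MIPS16/microMIPS (MIPS_HFLAG_M16) so that a later jr
     * re-enters the compressed ISA correctly.
     */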
/* nanoMIPS Branches */
static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
                                  int rs, int rt, int32_t offset)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:    /* rx == rx */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL: /* 0 >= 0   */
            /* Always take and link */
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:    /* rx != rx */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            ctx->hflags |= MIPS_HFLAG_BR;
            tcg_gen_movi_tl(cpu_gpr[rt],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
            ctx->hflags |= MIPS_HFLAG_BC;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    if (insn_bytes == 2) {
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
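    /*
     * Added note: for the EXT family the msb field of the instruction
     * encodes size - 1, hence the extract width msb + 1; for the INS
     * family msb holds the position of the last bit to insert, hence
     * the deposit width msb - lsb + 1.
     */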
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
    /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
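    /*
     * Added note: LSA/DLSA compute rd = (rs << (sa + 1)) + rt; the 2-bit
     * immediate therefore selects a shift of 1..4, and the 32-bit form
     * re-sign-extends the result as usual on a 64-bit target.
     */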
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        gen_load_gpr(t0, rt);
        gen_load_gpr(t0, rs);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 32 - bits);
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
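/*
 * Added note: gen_align_bits() is shared by ALIGN (which passes bp * 8, a
 * byte boundary, via gen_align() below) and the EXT variants (which pass
 * wordsz - shift via gen_ext() below); both essentially extract a
 * word-sized field from the {rs:rt} concatenation.
 */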
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);

static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);

static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
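/*
 * Added note: the 32-bit CP0 accessors deliberately go through
 * tcg_gen_ext_i32_tl()/tcg_gen_ext32s_tl() so that on a 64-bit target the
 * guest always observes 32-bit CP0 registers as sign-extended values,
 * matching the architectural definition of MFC0.
 */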
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
5161 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5163 const char *rn
= "invalid";
5169 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5170 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
5174 goto cp0_unimplemented
;
5180 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5181 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
5185 goto cp0_unimplemented
;
5191 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
5192 ctx
->CP0_LLAddr_shift
);
5196 CP0_CHECK(ctx
->mrp
);
5197 gen_helper_mfhc0_maar(arg
, cpu_env
);
5201 goto cp0_unimplemented
;
5210 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
5214 goto cp0_unimplemented
;
5218 goto cp0_unimplemented
;
5220 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
5224 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5225 tcg_gen_movi_tl(arg
, 0);
5228 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5230 const char *rn
= "invalid";
5231 uint64_t mask
= ctx
->PAMask
>> 36;
5237 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5238 tcg_gen_andi_tl(arg
, arg
, mask
);
5239 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
5243 goto cp0_unimplemented
;
5249 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5250 tcg_gen_andi_tl(arg
, arg
, mask
);
5251 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
5255 goto cp0_unimplemented
;
5261 /* LLAddr is read-only (the only exception is bit 0 if LLB is
5262 supported); the CP0_LLAddr_rw_bitmask does not seem to be
5263 relevant for modern MIPS cores supporting MTHC0, therefore
5264 treating MTHC0 to LLAddr as NOP. */
5268 CP0_CHECK(ctx
->mrp
);
5269 gen_helper_mthc0_maar(cpu_env
, arg
);
5273 goto cp0_unimplemented
;
5282 tcg_gen_andi_tl(arg
, arg
, mask
);
5283 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5287 goto cp0_unimplemented
;
5291 goto cp0_unimplemented
;
5293 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5296 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5299 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5301 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5302 tcg_gen_movi_tl(arg
, 0);
5304 tcg_gen_movi_tl(arg
, ~0);
5308 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5310 const char *rn
= "invalid";
5313 check_insn(ctx
, ISA_MIPS32
);
5319 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5323 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5324 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5328 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5329 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5333 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5334 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5339 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5343 goto cp0_unimplemented
;
5349 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5350 gen_helper_mfc0_random(arg
, cpu_env
);
5354 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5359 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5364 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5369 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5370 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5374 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5375 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5379 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5380 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5381 rn
= "VPEScheFBack";
5384 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5389 goto cp0_unimplemented
;
5396 TCGv_i64 tmp
= tcg_temp_new_i64();
5397 tcg_gen_ld_i64(tmp
, cpu_env
,
5398 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5399 #if defined(TARGET_MIPS64)
5401 /* Move RI/XI fields to bits 31:30 */
5402 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5403 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5406 gen_move_low32(arg
, tmp
);
5407 tcg_temp_free_i64(tmp
);
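            /*
             * Added note: on a 64-bit target the EntryLo RI/XI bits live at
             * bits 63:62; the shri/deposit pair above relocates them to
             * bits 31:30, where 32-bit MFC0 architecturally expects them.
             */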
5412 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5413 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5417 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5418 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5422 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5423 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5427 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5428 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5432 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5433 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5437 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5438 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5442 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5443 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5447 goto cp0_unimplemented
;
5454 TCGv_i64 tmp
= tcg_temp_new_i64();
5455 tcg_gen_ld_i64(tmp
, cpu_env
,
5456 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5457 #if defined(TARGET_MIPS64)
5459 /* Move RI/XI fields to bits 31:30 */
5460 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5461 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5464 gen_move_low32(arg
, tmp
);
5465 tcg_temp_free_i64(tmp
);
5471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5472 rn
= "GlobalNumber";
5475 goto cp0_unimplemented
;
5481 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5482 tcg_gen_ext32s_tl(arg
, arg
);
5486 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5487 rn
= "ContextConfig";
5488 goto cp0_unimplemented
;
5490 CP0_CHECK(ctx
->ulri
);
5491 tcg_gen_ld_tl(arg
, cpu_env
,
5492 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5493 tcg_gen_ext32s_tl(arg
, arg
);
5497 goto cp0_unimplemented
;
5503 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5507 check_insn(ctx
, ISA_MIPS32R2
);
5508 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5513 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5514 tcg_gen_ext32s_tl(arg
, arg
);
5519 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5520 tcg_gen_ext32s_tl(arg
, arg
);
5525 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5526 tcg_gen_ext32s_tl(arg
, arg
);
5530 goto cp0_unimplemented
;
5536 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5540 check_insn(ctx
, ISA_MIPS32R2
);
5541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5545 check_insn(ctx
, ISA_MIPS32R2
);
5546 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5550 check_insn(ctx
, ISA_MIPS32R2
);
5551 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5555 check_insn(ctx
, ISA_MIPS32R2
);
5556 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5560 check_insn(ctx
, ISA_MIPS32R2
);
5561 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5565 goto cp0_unimplemented
;
5571 check_insn(ctx
, ISA_MIPS32R2
);
5572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5576 goto cp0_unimplemented
;
5582 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5583 tcg_gen_ext32s_tl(arg
, arg
);
5588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5593 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
5599 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
5603 goto cp0_unimplemented
;
5609 /* Mark as an IO operation because we read the time. */
5610 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5613 gen_helper_mfc0_count(arg
, cpu_env
);
5614 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5617 /* Break the TB to be able to take timer interrupts immediately
5618 after reading count. DISAS_STOP isn't sufficient, we need to
5619 ensure we break completely out of translated code. */
5620 gen_save_pc(ctx
->base
.pc_next
+ 4);
5621 ctx
->base
.is_jmp
= DISAS_EXIT
;
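            /*
             * Added note: a CP0 Count read behaves like an I/O access when
             * icount is enabled, and the gen_save_pc() + DISAS_EXIT above
             * force a full exit from translated code so that a timer
             * interrupt raised by this read can be serviced immediately.
             */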
5624 /* 6,7 are implementation dependent */
5626 goto cp0_unimplemented
;
5632 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5633 tcg_gen_ext32s_tl(arg
, arg
);
5637 goto cp0_unimplemented
;
5643 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5646 /* 6,7 are implementation dependent */
5648 goto cp0_unimplemented
;
5654 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5658 check_insn(ctx
, ISA_MIPS32R2
);
5659 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5663 check_insn(ctx
, ISA_MIPS32R2
);
5664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5668 check_insn(ctx
, ISA_MIPS32R2
);
5669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5673 goto cp0_unimplemented
;
5679 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5683 goto cp0_unimplemented
;
5689 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5690 tcg_gen_ext32s_tl(arg
, arg
);
5694 goto cp0_unimplemented
;
5700 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5704 check_insn(ctx
, ISA_MIPS32R2
);
5705 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5706 tcg_gen_ext32s_tl(arg
, arg
);
5710 check_insn(ctx
, ISA_MIPS32R2
);
5711 CP0_CHECK(ctx
->cmgcr
);
5712 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5713 tcg_gen_ext32s_tl(arg
, arg
);
5717 goto cp0_unimplemented
;
5723 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5731 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5735 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5739 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5743 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5746 /* 6,7 are implementation dependent */
5748 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5752 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5756 goto cp0_unimplemented
;
5762 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5766 CP0_CHECK(ctx
->mrp
);
5767 gen_helper_mfc0_maar(arg
, cpu_env
);
5771 CP0_CHECK(ctx
->mrp
);
5772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5776 goto cp0_unimplemented
;
5789 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
5790 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5794 goto cp0_unimplemented
;
5807 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
5808 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5812 goto cp0_unimplemented
;
5818 #if defined(TARGET_MIPS64)
5819 check_insn(ctx
, ISA_MIPS3
);
5820 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5821 tcg_gen_ext32s_tl(arg
, arg
);
5826 goto cp0_unimplemented
;
5830 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5831 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5834 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5838 goto cp0_unimplemented
;
5842 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5843 rn
= "'Diagnostic"; /* implementation dependent */
5848 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5852 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5853 rn
= "TraceControl";
5854 goto cp0_unimplemented
;
5856 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5857 rn
= "TraceControl2";
5858 goto cp0_unimplemented
;
5860 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5861 rn
= "UserTraceData";
5862 goto cp0_unimplemented
;
5864 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5866 goto cp0_unimplemented
;
5868 goto cp0_unimplemented
;
5875 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5876 tcg_gen_ext32s_tl(arg
, arg
);
5880 goto cp0_unimplemented
;
5886 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5887 rn
= "Performance0";
5890 // gen_helper_mfc0_performance1(arg);
5891 rn
= "Performance1";
5892 goto cp0_unimplemented
;
5894 // gen_helper_mfc0_performance2(arg);
5895 rn
= "Performance2";
5896 goto cp0_unimplemented
;
5898 // gen_helper_mfc0_performance3(arg);
5899 rn
= "Performance3";
5900 goto cp0_unimplemented
;
5902 // gen_helper_mfc0_performance4(arg);
5903 rn
= "Performance4";
5904 goto cp0_unimplemented
;
5906 // gen_helper_mfc0_performance5(arg);
5907 rn
= "Performance5";
5908 goto cp0_unimplemented
;
5910 // gen_helper_mfc0_performance6(arg);
5911 rn
= "Performance6";
5912 goto cp0_unimplemented
;
5914 // gen_helper_mfc0_performance7(arg);
5915 rn
= "Performance7";
5916 goto cp0_unimplemented
;
5918 goto cp0_unimplemented
;
5924 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5928 goto cp0_unimplemented
;
5937 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5941 goto cp0_unimplemented
;
5951 TCGv_i64 tmp
= tcg_temp_new_i64();
5952 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5953 gen_move_low32(arg
, tmp
);
5954 tcg_temp_free_i64(tmp
);
5962 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5966 goto cp0_unimplemented
;
5975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5982 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5986 goto cp0_unimplemented
;
5992 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5993 tcg_gen_ext32s_tl(arg
, arg
);
5997 goto cp0_unimplemented
;
6004 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6013 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6014 tcg_gen_ld_tl(arg
, cpu_env
,
6015 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6016 tcg_gen_ext32s_tl(arg
, arg
);
6020 goto cp0_unimplemented
;
6024 goto cp0_unimplemented
;
6026 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
6030 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6031 gen_mfc0_unimplemented(ctx
, arg
);
6034 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6036 const char *rn
= "invalid";
6039 check_insn(ctx
, ISA_MIPS32
);
6041 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6049 gen_helper_mtc0_index(cpu_env
, arg
);
6053 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6054 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6058 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6063 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6073 goto cp0_unimplemented
;
6083 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6084 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6088 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6089 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6093 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6094 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6098 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6099 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6103 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6104 tcg_gen_st_tl(arg
, cpu_env
,
6105 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6110 tcg_gen_st_tl(arg
, cpu_env
,
6111 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6112 rn
= "VPEScheFBack";
6115 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6116 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6120 goto cp0_unimplemented
;
6126 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6130 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6131 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6135 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6136 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6140 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6141 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6145 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6146 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6150 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6151 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6155 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6156 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6160 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6161 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6165 goto cp0_unimplemented
;
6171 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6177 rn
= "GlobalNumber";
6180 goto cp0_unimplemented
;
6186 gen_helper_mtc0_context(cpu_env
, arg
);
6190 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6191 rn
= "ContextConfig";
6192 goto cp0_unimplemented
;
6194 CP0_CHECK(ctx
->ulri
);
6195 tcg_gen_st_tl(arg
, cpu_env
,
6196 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6200 goto cp0_unimplemented
;
6206 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6210 check_insn(ctx
, ISA_MIPS32R2
);
6211 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6213 ctx
->base
.is_jmp
= DISAS_STOP
;
6217 gen_helper_mtc0_segctl0(cpu_env
, arg
);
6222 gen_helper_mtc0_segctl1(cpu_env
, arg
);
6227 gen_helper_mtc0_segctl2(cpu_env
, arg
);
6231 goto cp0_unimplemented
;
6237 gen_helper_mtc0_wired(cpu_env
, arg
);
6241 check_insn(ctx
, ISA_MIPS32R2
);
6242 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6246 check_insn(ctx
, ISA_MIPS32R2
);
6247 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6251 check_insn(ctx
, ISA_MIPS32R2
);
6252 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6256 check_insn(ctx
, ISA_MIPS32R2
);
6257 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6261 check_insn(ctx
, ISA_MIPS32R2
);
6262 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
6266 goto cp0_unimplemented
;
6272 check_insn(ctx
, ISA_MIPS32R2
);
6273 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6274 ctx
->base
.is_jmp
= DISAS_STOP
;
6278 goto cp0_unimplemented
;
6300 goto cp0_unimplemented
;
6306 gen_helper_mtc0_count(cpu_env
, arg
);
6309 /* 6,7 are implementation dependent */
6311 goto cp0_unimplemented
;
6317 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6321 goto cp0_unimplemented
;
6327 gen_helper_mtc0_compare(cpu_env
, arg
);
6330 /* 6,7 are implementation dependent */
6332 goto cp0_unimplemented
;
6338 save_cpu_state(ctx
, 1);
6339 gen_helper_mtc0_status(cpu_env
, arg
);
6340 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6341 gen_save_pc(ctx
->base
.pc_next
+ 4);
6342 ctx
->base
.is_jmp
= DISAS_EXIT
;
6346 check_insn(ctx
, ISA_MIPS32R2
);
6347 gen_helper_mtc0_intctl(cpu_env
, arg
);
6348 /* Stop translation as we may have switched the execution mode */
6349 ctx
->base
.is_jmp
= DISAS_STOP
;
6353 check_insn(ctx
, ISA_MIPS32R2
);
6354 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6355 /* Stop translation as we may have switched the execution mode */
6356 ctx
->base
.is_jmp
= DISAS_STOP
;
6360 check_insn(ctx
, ISA_MIPS32R2
);
6361 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6362 /* Stop translation as we may have switched the execution mode */
6363 ctx
->base
.is_jmp
= DISAS_STOP
;
6367 goto cp0_unimplemented
;
6373 save_cpu_state(ctx
, 1);
6374 gen_helper_mtc0_cause(cpu_env
, arg
);
6375 /* Stop translation as we may have triggered an interrupt.
6376 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6377 * translated code to check for pending interrupts. */
6378 gen_save_pc(ctx
->base
.pc_next
+ 4);
6379 ctx
->base
.is_jmp
= DISAS_EXIT
;
6383 goto cp0_unimplemented
;
6389 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6393 goto cp0_unimplemented
;
6403 check_insn(ctx
, ISA_MIPS32R2
);
6404 gen_helper_mtc0_ebase(cpu_env
, arg
);
6408 goto cp0_unimplemented
;
6414 gen_helper_mtc0_config0(cpu_env
, arg
);
6416 /* Stop translation as we may have switched the execution mode */
6417 ctx
->base
.is_jmp
= DISAS_STOP
;
6420 /* ignored, read only */
6424 gen_helper_mtc0_config2(cpu_env
, arg
);
6426 /* Stop translation as we may have switched the execution mode */
6427 ctx
->base
.is_jmp
= DISAS_STOP
;
6430 gen_helper_mtc0_config3(cpu_env
, arg
);
6432 /* Stop translation as we may have switched the execution mode */
6433 ctx
->base
.is_jmp
= DISAS_STOP
;
6436 gen_helper_mtc0_config4(cpu_env
, arg
);
6438 ctx
->base
.is_jmp
= DISAS_STOP
;
6441 gen_helper_mtc0_config5(cpu_env
, arg
);
6443 /* Stop translation as we may have switched the execution mode */
6444 ctx
->base
.is_jmp
= DISAS_STOP
;
6446 /* 6,7 are implementation dependent */
6456 rn
= "Invalid config selector";
6457 goto cp0_unimplemented
;
6463 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6467 CP0_CHECK(ctx
->mrp
);
6468 gen_helper_mtc0_maar(cpu_env
, arg
);
6472 CP0_CHECK(ctx
->mrp
);
6473 gen_helper_mtc0_maari(cpu_env
, arg
);
6477 goto cp0_unimplemented
;
6490 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6491 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6495 goto cp0_unimplemented
;
6508 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6509 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6513 goto cp0_unimplemented
;
6519 #if defined(TARGET_MIPS64)
6520 check_insn(ctx
, ISA_MIPS3
);
6521 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6526 goto cp0_unimplemented
;
6530 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6531 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6534 gen_helper_mtc0_framemask(cpu_env
, arg
);
6538 goto cp0_unimplemented
;
6543 rn
= "Diagnostic"; /* implementation dependent */
6548 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6549 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6550 gen_save_pc(ctx
->base
.pc_next
+ 4);
6551 ctx
->base
.is_jmp
= DISAS_EXIT
;
6555 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6556 rn
= "TraceControl";
6557 /* Stop translation as we may have switched the execution mode */
6558 ctx
->base
.is_jmp
= DISAS_STOP
;
6559 goto cp0_unimplemented
;
6561 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6562 rn
= "TraceControl2";
6563 /* Stop translation as we may have switched the execution mode */
6564 ctx
->base
.is_jmp
= DISAS_STOP
;
6565 goto cp0_unimplemented
;
6567 /* Stop translation as we may have switched the execution mode */
6568 ctx
->base
.is_jmp
= DISAS_STOP
;
6569 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6570 rn
= "UserTraceData";
6571 /* Stop translation as we may have switched the execution mode */
6572 ctx
->base
.is_jmp
= DISAS_STOP
;
6573 goto cp0_unimplemented
;
6575 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6576 /* Stop translation as we may have switched the execution mode */
6577 ctx
->base
.is_jmp
= DISAS_STOP
;
6579 goto cp0_unimplemented
;
6581 goto cp0_unimplemented
;
6588 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6592 goto cp0_unimplemented
;
6598 gen_helper_mtc0_performance0(cpu_env
, arg
);
6599 rn
= "Performance0";
6602 // gen_helper_mtc0_performance1(arg);
6603 rn
= "Performance1";
6604 goto cp0_unimplemented
;
6606 // gen_helper_mtc0_performance2(arg);
6607 rn
= "Performance2";
6608 goto cp0_unimplemented
;
6610 // gen_helper_mtc0_performance3(arg);
6611 rn
= "Performance3";
6612 goto cp0_unimplemented
;
6614 // gen_helper_mtc0_performance4(arg);
6615 rn
= "Performance4";
6616 goto cp0_unimplemented
;
6618 // gen_helper_mtc0_performance5(arg);
6619 rn
= "Performance5";
6620 goto cp0_unimplemented
;
6622 // gen_helper_mtc0_performance6(arg);
6623 rn
= "Performance6";
6624 goto cp0_unimplemented
;
6626 // gen_helper_mtc0_performance7(arg);
6627 rn
= "Performance7";
6628 goto cp0_unimplemented
;
6630 goto cp0_unimplemented
;
6636 gen_helper_mtc0_errctl(cpu_env
, arg
);
6637 ctx
->base
.is_jmp
= DISAS_STOP
;
6641 goto cp0_unimplemented
;
6654 goto cp0_unimplemented
;
6663 gen_helper_mtc0_taglo(cpu_env
, arg
);
6670 gen_helper_mtc0_datalo(cpu_env
, arg
);
6674 goto cp0_unimplemented
;
6683 gen_helper_mtc0_taghi(cpu_env
, arg
);
6690 gen_helper_mtc0_datahi(cpu_env
, arg
);
6695 goto cp0_unimplemented
;
6701 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6705 goto cp0_unimplemented
;
6712 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6721 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6722 tcg_gen_st_tl(arg
, cpu_env
,
6723 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6727 goto cp0_unimplemented
;
6731 goto cp0_unimplemented
;
6733 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6735 /* For simplicity assume that all writes can cause interrupts. */
6736 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6738 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6739 * translated code to check for pending interrupts. */
6740 gen_save_pc(ctx
->base
.pc_next
+ 4);
6741 ctx
->base
.is_jmp
= DISAS_EXIT
;
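    /*
     * Added note: ending every MTC0 with a translation-block exit is a
     * conservative but simple policy: any CP0 write that can unmask or
     * raise an interrupt (Status, Cause, Compare, ...) is then guaranteed
     * to have its effect checked before the next guest instruction runs.
     */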
6746 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6749 #if defined(TARGET_MIPS64)
6750 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6752 const char *rn
= "invalid";
6755 check_insn(ctx
, ISA_MIPS64
);
6761 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6765 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6766 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6770 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6771 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6775 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6776 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6781 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6785 goto cp0_unimplemented
;
6791 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6792 gen_helper_mfc0_random(arg
, cpu_env
);
6796 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6801 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6802 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6806 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6807 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6811 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6812 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6816 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6817 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6821 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6822 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6823 rn
= "VPEScheFBack";
6826 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6827 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6831 goto cp0_unimplemented
;
6837 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6841 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6842 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6846 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6847 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6851 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6852 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6856 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6857 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6861 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6862 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6866 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6867 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6871 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6872 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6876 goto cp0_unimplemented
;
6882 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6887 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6888 rn
= "GlobalNumber";
6891 goto cp0_unimplemented
;
6897 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6901 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6902 rn
= "ContextConfig";
6903 goto cp0_unimplemented
;
6905 CP0_CHECK(ctx
->ulri
);
6906 tcg_gen_ld_tl(arg
, cpu_env
,
6907 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6911 goto cp0_unimplemented
;
6917 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6921 check_insn(ctx
, ISA_MIPS32R2
);
6922 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6927 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6932 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6937 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6941 goto cp0_unimplemented
;
6947 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6951 check_insn(ctx
, ISA_MIPS32R2
);
6952 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6956 check_insn(ctx
, ISA_MIPS32R2
);
6957 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6961 check_insn(ctx
, ISA_MIPS32R2
);
6962 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6966 check_insn(ctx
, ISA_MIPS32R2
);
6967 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6971 check_insn(ctx
, ISA_MIPS32R2
);
6972 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6976 goto cp0_unimplemented
;
6982 check_insn(ctx
, ISA_MIPS32R2
);
6983 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6987 goto cp0_unimplemented
;
6993 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6998 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7003 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7008 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7009 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7013 goto cp0_unimplemented
;
7019 /* Mark as an IO operation because we read the time. */
7020 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7023 gen_helper_mfc0_count(arg
, cpu_env
);
7024 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7027 /* Break the TB to be able to take timer interrupts immediately
7028 after reading count. DISAS_STOP isn't sufficient, we need to
7029 ensure we break completely out of translated code. */
7030 gen_save_pc(ctx
->base
.pc_next
+ 4);
7031 ctx
->base
.is_jmp
= DISAS_EXIT
;
7034 /* 6,7 are implementation dependent */
7036 goto cp0_unimplemented
;
7042 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7046 goto cp0_unimplemented
;
7052 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7055 /* 6,7 are implementation dependent */
7057 goto cp0_unimplemented
;
7063 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7067 check_insn(ctx
, ISA_MIPS32R2
);
7068 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7072 check_insn(ctx
, ISA_MIPS32R2
);
7073 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7077 check_insn(ctx
, ISA_MIPS32R2
);
7078 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7082 goto cp0_unimplemented
;
7088 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7092 goto cp0_unimplemented
;
7098 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7102 goto cp0_unimplemented
;
7108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7112 check_insn(ctx
, ISA_MIPS32R2
);
7113 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7117 check_insn(ctx
, ISA_MIPS32R2
);
7118 CP0_CHECK(ctx
->cmgcr
);
7119 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7123 goto cp0_unimplemented
;
7129 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7133 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7141 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7145 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7149 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7152 /* 6,7 are implementation dependent */
7154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
        goto cp0_unimplemented;
        gen_helper_dmfc0_lladdr(arg, cpu_env);
        CP0_CHECK(ctx->mrp);
        gen_helper_dmfc0_maar(arg, cpu_env);
        CP0_CHECK(ctx->mrp);
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_1e0i(dmfc0_watchlo, arg, sel);
        goto cp0_unimplemented;
        CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
        gen_helper_1e0i(mfc0_watchhi, arg, sel);
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS3);
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
        goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
        gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
        // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
        rn = "TraceControl";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
        rn = "TraceControl2";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
        rn = "UserTraceData";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
        rn = "Performance0";
        // gen_helper_dmfc0_performance1(arg);
        rn = "Performance1";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance2(arg);
        rn = "Performance2";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance3(arg);
        rn = "Performance3";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance4(arg);
        rn = "Performance4";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance5(arg);
        rn = "Performance5";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance6(arg);
        rn = "Performance6";
        goto cp0_unimplemented;
        // gen_helper_dmfc0_performance7(arg);
        rn = "Performance7";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
        goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
        goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
        CP0_CHECK(ctx->kscrexist & (1 << sel));
        tcg_gen_ld_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("dmfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
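/*
 * Summary of the dmfc0 read paths above: register/selector pairs whose
 * read has side effects or depends on run-time state go through a
 * dedicated gen_helper_dmfc0_* call, plain 32-bit CP0 fields are
 * fetched with gen_mfc0_load32(), and full-width fields are loaded
 * directly with tcg_gen_ld_tl() from their offset in CPUMIPSState.
 * Anything else falls through to the cp0_unimplemented path, which
 * logs the access with qemu_log_mask(LOG_UNIMP, ...) and hands the
 * destination register to gen_mfc0_unimplemented().
 */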
7430 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7432 const char *rn
= "invalid";
7435 check_insn(ctx
, ISA_MIPS64
);
7437 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7445 gen_helper_mtc0_index(cpu_env
, arg
);
7449 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7450 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7454 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7459 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7469 goto cp0_unimplemented
;
7479 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7480 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7484 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7485 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7489 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7490 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7494 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7495 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7499 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7500 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7504 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7505 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7506 rn
= "VPEScheFBack";
7509 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7510 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7514 goto cp0_unimplemented
;
7520 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7524 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7525 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7529 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7530 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7534 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7535 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7539 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7540 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7544 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7545 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7549 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7550 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7554 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7555 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7559 goto cp0_unimplemented
;
7565 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7571 rn
= "GlobalNumber";
7574 goto cp0_unimplemented
;
7580 gen_helper_mtc0_context(cpu_env
, arg
);
7584 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7585 rn
= "ContextConfig";
7586 goto cp0_unimplemented
;
7588 CP0_CHECK(ctx
->ulri
);
7589 tcg_gen_st_tl(arg
, cpu_env
,
7590 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7594 goto cp0_unimplemented
;
7600 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7604 check_insn(ctx
, ISA_MIPS32R2
);
7605 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7610 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7615 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7620 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7624 goto cp0_unimplemented
;
7630 gen_helper_mtc0_wired(cpu_env
, arg
);
7634 check_insn(ctx
, ISA_MIPS32R2
);
7635 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7639 check_insn(ctx
, ISA_MIPS32R2
);
7640 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7644 check_insn(ctx
, ISA_MIPS32R2
);
7645 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7649 check_insn(ctx
, ISA_MIPS32R2
);
7650 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7654 check_insn(ctx
, ISA_MIPS32R2
);
7655 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7659 goto cp0_unimplemented
;
7665 check_insn(ctx
, ISA_MIPS32R2
);
7666 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7667 ctx
->base
.is_jmp
= DISAS_STOP
;
7671 goto cp0_unimplemented
;
7693 goto cp0_unimplemented
;
7699 gen_helper_mtc0_count(cpu_env
, arg
);
7702 /* 6,7 are implementation dependent */
7704 goto cp0_unimplemented
;
7706 /* Stop translation as we may have switched the execution mode */
7707 ctx
->base
.is_jmp
= DISAS_STOP
;
7712 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7716 goto cp0_unimplemented
;
7722 gen_helper_mtc0_compare(cpu_env
, arg
);
7725 /* 6,7 are implementation dependent */
7727 goto cp0_unimplemented
;
7729 /* Stop translation as we may have switched the execution mode */
7730 ctx
->base
.is_jmp
= DISAS_STOP
;
7735 save_cpu_state(ctx
, 1);
7736 gen_helper_mtc0_status(cpu_env
, arg
);
7737 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7738 gen_save_pc(ctx
->base
.pc_next
+ 4);
7739 ctx
->base
.is_jmp
= DISAS_EXIT
;
7743 check_insn(ctx
, ISA_MIPS32R2
);
7744 gen_helper_mtc0_intctl(cpu_env
, arg
);
7745 /* Stop translation as we may have switched the execution mode */
7746 ctx
->base
.is_jmp
= DISAS_STOP
;
7750 check_insn(ctx
, ISA_MIPS32R2
);
7751 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7752 /* Stop translation as we may have switched the execution mode */
7753 ctx
->base
.is_jmp
= DISAS_STOP
;
7757 check_insn(ctx
, ISA_MIPS32R2
);
7758 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7759 /* Stop translation as we may have switched the execution mode */
7760 ctx
->base
.is_jmp
= DISAS_STOP
;
7764 goto cp0_unimplemented
;
7770 save_cpu_state(ctx
, 1);
7771 gen_helper_mtc0_cause(cpu_env
, arg
);
7772 /* Stop translation as we may have triggered an interrupt.
7773 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7774 * translated code to check for pending interrupts. */
7775 gen_save_pc(ctx
->base
.pc_next
+ 4);
7776 ctx
->base
.is_jmp
= DISAS_EXIT
;
7780 goto cp0_unimplemented
;
7786 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7790 goto cp0_unimplemented
;
7800 check_insn(ctx
, ISA_MIPS32R2
);
7801 gen_helper_mtc0_ebase(cpu_env
, arg
);
7805 goto cp0_unimplemented
;
7811 gen_helper_mtc0_config0(cpu_env
, arg
);
7813 /* Stop translation as we may have switched the execution mode */
7814 ctx
->base
.is_jmp
= DISAS_STOP
;
7817 /* ignored, read only */
7821 gen_helper_mtc0_config2(cpu_env
, arg
);
7823 /* Stop translation as we may have switched the execution mode */
7824 ctx
->base
.is_jmp
= DISAS_STOP
;
7827 gen_helper_mtc0_config3(cpu_env
, arg
);
7829 /* Stop translation as we may have switched the execution mode */
7830 ctx
->base
.is_jmp
= DISAS_STOP
;
7833 /* currently ignored */
7837 gen_helper_mtc0_config5(cpu_env
, arg
);
7839 /* Stop translation as we may have switched the execution mode */
7840 ctx
->base
.is_jmp
= DISAS_STOP
;
7842 /* 6,7 are implementation dependent */
7844 rn
= "Invalid config selector";
7845 goto cp0_unimplemented
;
7851 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7855 CP0_CHECK(ctx
->mrp
);
7856 gen_helper_mtc0_maar(cpu_env
, arg
);
7860 CP0_CHECK(ctx
->mrp
);
7861 gen_helper_mtc0_maari(cpu_env
, arg
);
7865 goto cp0_unimplemented
;
7878 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7879 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7883 goto cp0_unimplemented
;
7896 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7897 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7901 goto cp0_unimplemented
;
7907 check_insn(ctx
, ISA_MIPS3
);
7908 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7912 goto cp0_unimplemented
;
7916 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7917 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7920 gen_helper_mtc0_framemask(cpu_env
, arg
);
7924 goto cp0_unimplemented
;
7929 rn
= "Diagnostic"; /* implementation dependent */
7934 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7935 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7936 gen_save_pc(ctx
->base
.pc_next
+ 4);
7937 ctx
->base
.is_jmp
= DISAS_EXIT
;
7941 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7942 /* Stop translation as we may have switched the execution mode */
7943 ctx
->base
.is_jmp
= DISAS_STOP
;
7944 rn
= "TraceControl";
7945 goto cp0_unimplemented
;
7947 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7948 /* Stop translation as we may have switched the execution mode */
7949 ctx
->base
.is_jmp
= DISAS_STOP
;
7950 rn
= "TraceControl2";
7951 goto cp0_unimplemented
;
7953 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7954 /* Stop translation as we may have switched the execution mode */
7955 ctx
->base
.is_jmp
= DISAS_STOP
;
7956 rn
= "UserTraceData";
7957 goto cp0_unimplemented
;
7959 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7960 /* Stop translation as we may have switched the execution mode */
7961 ctx
->base
.is_jmp
= DISAS_STOP
;
7963 goto cp0_unimplemented
;
7965 goto cp0_unimplemented
;
7972 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7976 goto cp0_unimplemented
;
7982 gen_helper_mtc0_performance0(cpu_env
, arg
);
7983 rn
= "Performance0";
7986 // gen_helper_mtc0_performance1(cpu_env, arg);
7987 rn
= "Performance1";
7988 goto cp0_unimplemented
;
7990 // gen_helper_mtc0_performance2(cpu_env, arg);
7991 rn
= "Performance2";
7992 goto cp0_unimplemented
;
7994 // gen_helper_mtc0_performance3(cpu_env, arg);
7995 rn
= "Performance3";
7996 goto cp0_unimplemented
;
7998 // gen_helper_mtc0_performance4(cpu_env, arg);
7999 rn
= "Performance4";
8000 goto cp0_unimplemented
;
8002 // gen_helper_mtc0_performance5(cpu_env, arg);
8003 rn
= "Performance5";
8004 goto cp0_unimplemented
;
8006 // gen_helper_mtc0_performance6(cpu_env, arg);
8007 rn
= "Performance6";
8008 goto cp0_unimplemented
;
8010 // gen_helper_mtc0_performance7(cpu_env, arg);
8011 rn
= "Performance7";
8012 goto cp0_unimplemented
;
8014 goto cp0_unimplemented
;
8020 gen_helper_mtc0_errctl(cpu_env
, arg
);
8021 ctx
->base
.is_jmp
= DISAS_STOP
;
8025 goto cp0_unimplemented
;
8038 goto cp0_unimplemented
;
8047 gen_helper_mtc0_taglo(cpu_env
, arg
);
8054 gen_helper_mtc0_datalo(cpu_env
, arg
);
8058 goto cp0_unimplemented
;
8067 gen_helper_mtc0_taghi(cpu_env
, arg
);
8074 gen_helper_mtc0_datahi(cpu_env
, arg
);
8079 goto cp0_unimplemented
;
8085 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8089 goto cp0_unimplemented
;
8096 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8105 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8106 tcg_gen_st_tl(arg
, cpu_env
,
8107 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8111 goto cp0_unimplemented
;
8115 goto cp0_unimplemented
;
8117 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
8119 /* For simplicity assume that all writes can cause interrupts. */
8120 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8122 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8123 * translated code to check for pending interrupts. */
8124 gen_save_pc(ctx
->base
.pc_next
+ 4);
8125 ctx
->base
.is_jmp
= DISAS_EXIT
;
8130 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8132 #endif /* TARGET_MIPS64 */
8134 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
8135 int u
, int sel
, int h
)
8137 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8138 TCGv t0
= tcg_temp_local_new();
8140 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8141 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8142 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
8143 tcg_gen_movi_tl(t0
, -1);
8144 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8145 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
8146 tcg_gen_movi_tl(t0
, -1);
8152 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
8155 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
8165 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
8168 gen_helper_mftc0_tcbind(t0
, cpu_env
);
8171 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
8174 gen_helper_mftc0_tchalt(t0
, cpu_env
);
8177 gen_helper_mftc0_tccontext(t0
, cpu_env
);
8180 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
8183 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
8186 gen_mfc0(ctx
, t0
, rt
, sel
);
8193 gen_helper_mftc0_entryhi(t0
, cpu_env
);
8196 gen_mfc0(ctx
, t0
, rt
, sel
);
8202 gen_helper_mftc0_status(t0
, cpu_env
);
8205 gen_mfc0(ctx
, t0
, rt
, sel
);
8211 gen_helper_mftc0_cause(t0
, cpu_env
);
8221 gen_helper_mftc0_epc(t0
, cpu_env
);
8231 gen_helper_mftc0_ebase(t0
, cpu_env
);
8248 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
8258 gen_helper_mftc0_debug(t0
, cpu_env
);
8261 gen_mfc0(ctx
, t0
, rt
, sel
);
8266 gen_mfc0(ctx
, t0
, rt
, sel
);
8268 } else switch (sel
) {
8269 /* GPR registers. */
8271 gen_helper_1e0i(mftgpr
, t0
, rt
);
8273 /* Auxiliary CPU registers */
8277 gen_helper_1e0i(mftlo
, t0
, 0);
8280 gen_helper_1e0i(mfthi
, t0
, 0);
8283 gen_helper_1e0i(mftacx
, t0
, 0);
8286 gen_helper_1e0i(mftlo
, t0
, 1);
8289 gen_helper_1e0i(mfthi
, t0
, 1);
8292 gen_helper_1e0i(mftacx
, t0
, 1);
8295 gen_helper_1e0i(mftlo
, t0
, 2);
8298 gen_helper_1e0i(mfthi
, t0
, 2);
8301 gen_helper_1e0i(mftacx
, t0
, 2);
8304 gen_helper_1e0i(mftlo
, t0
, 3);
8307 gen_helper_1e0i(mfthi
, t0
, 3);
8310 gen_helper_1e0i(mftacx
, t0
, 3);
8313 gen_helper_mftdsp(t0
, cpu_env
);
8319 /* Floating point (COP1). */
8321 /* XXX: For now we support only a single FPU context. */
8323 TCGv_i32 fp0
= tcg_temp_new_i32();
8325 gen_load_fpr32(ctx
, fp0
, rt
);
8326 tcg_gen_ext_i32_tl(t0
, fp0
);
8327 tcg_temp_free_i32(fp0
);
8329 TCGv_i32 fp0
= tcg_temp_new_i32();
8331 gen_load_fpr32h(ctx
, fp0
, rt
);
8332 tcg_gen_ext_i32_tl(t0
, fp0
);
8333 tcg_temp_free_i32(fp0
);
8337 /* XXX: For now we support only a single FPU context. */
8338 gen_helper_1e0i(cfc1
, t0
, rt
);
8340 /* COP2: Not implemented. */
8347 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
8348 gen_store_gpr(t0
, rd
);
8354 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
8355 generate_exception_end(ctx
, EXCP_RI
);
8358 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
8359 int u
, int sel
, int h
)
8361 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8362 TCGv t0
= tcg_temp_local_new();
8364 gen_load_gpr(t0
, rt
);
8365 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8366 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8367 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
8369 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8370 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
8377 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
8380 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
8390 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
8393 gen_helper_mttc0_tcbind(cpu_env
, t0
);
8396 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8399 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8402 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8405 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8408 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8411 gen_mtc0(ctx
, t0
, rd
, sel
);
8418 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8421 gen_mtc0(ctx
, t0
, rd
, sel
);
8427 gen_helper_mttc0_status(cpu_env
, t0
);
8430 gen_mtc0(ctx
, t0
, rd
, sel
);
8436 gen_helper_mttc0_cause(cpu_env
, t0
);
8446 gen_helper_mttc0_ebase(cpu_env
, t0
);
8456 gen_helper_mttc0_debug(cpu_env
, t0
);
8459 gen_mtc0(ctx
, t0
, rd
, sel
);
8464 gen_mtc0(ctx
, t0
, rd
, sel
);
8466 } else switch (sel
) {
8467 /* GPR registers. */
8469 gen_helper_0e1i(mttgpr
, t0
, rd
);
8471 /* Auxiliary CPU registers */
8475 gen_helper_0e1i(mttlo
, t0
, 0);
8478 gen_helper_0e1i(mtthi
, t0
, 0);
8481 gen_helper_0e1i(mttacx
, t0
, 0);
8484 gen_helper_0e1i(mttlo
, t0
, 1);
8487 gen_helper_0e1i(mtthi
, t0
, 1);
8490 gen_helper_0e1i(mttacx
, t0
, 1);
8493 gen_helper_0e1i(mttlo
, t0
, 2);
8496 gen_helper_0e1i(mtthi
, t0
, 2);
8499 gen_helper_0e1i(mttacx
, t0
, 2);
8502 gen_helper_0e1i(mttlo
, t0
, 3);
8505 gen_helper_0e1i(mtthi
, t0
, 3);
8508 gen_helper_0e1i(mttacx
, t0
, 3);
8511 gen_helper_mttdsp(cpu_env
, t0
);
8517 /* Floating point (COP1). */
8519 /* XXX: For now we support only a single FPU context. */
8521 TCGv_i32 fp0
= tcg_temp_new_i32();
8523 tcg_gen_trunc_tl_i32(fp0
, t0
);
8524 gen_store_fpr32(ctx
, fp0
, rd
);
8525 tcg_temp_free_i32(fp0
);
8527 TCGv_i32 fp0
= tcg_temp_new_i32();
8529 tcg_gen_trunc_tl_i32(fp0
, t0
);
8530 gen_store_fpr32h(ctx
, fp0
, rd
);
8531 tcg_temp_free_i32(fp0
);
8535 /* XXX: For now we support only a single FPU context. */
8537 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8539 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8540 tcg_temp_free_i32(fs_tmp
);
8542 /* Stop translation as we may have changed hflags */
8543 ctx
->base
.is_jmp
= DISAS_STOP
;
8545 /* COP2: Not implemented. */
8552 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8558 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8559 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";
    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_cp0_enabled(ctx);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_cp0_enabled(ctx);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
            gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
            gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->base.is_jmp = DISAS_EXIT;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->base.is_jmp = DISAS_EXIT;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        ctx->base.pc_next += 4;
        save_cpu_state(ctx, 1);
        ctx->base.pc_next -= 4;
        gen_helper_wait(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
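/*
 * Note on the CP0 dispatch above: CP0 moves are translated through
 * gen_mfc0/gen_mtc0 (and the dmfc0/dmtc0/mfhc0/mthc0 and mftr/mttr
 * variants), always behind check_cp0_enabled(). The TLB operations are
 * only emitted when the CPU model provides the matching env->tlb helper
 * hook, and TLBINV/TLBINVF degrade to a nop when unsupported. ERET vs.
 * ERETNC is selected from an opcode bit (bit 6, or bit 16 in the
 * 16-bit encodings), and ERET/DERET/WAIT all end the translation block
 * since they change privileged state or halt the CPU.
 */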
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->base.pc_next + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
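/*
 * The helper above communicates the branch entirely through translator
 * state: the evaluated FP condition (one or more fcr31 condition bits,
 * combined with not/and/or/nand as required by the any/all variants)
 * is widened into the global 'bcond', the target goes to ctx->btarget,
 * and MIPS_HFLAG_BC / MIPS_HFLAG_BL plus MIPS_HFLAG_BDS32 tell the main
 * translation loop to emit a 4-byte delay slot followed by the final
 * conditional branch; the "likely" form (MIPS_HFLAG_BL) additionally
 * nullifies the delay slot when the branch is not taken.
 */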
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
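/*
 * R6 removed the fcr31 condition codes: the branch condition here is
 * simply bit 0 of FPR ft (the andi with 1), inverted with xori for the
 * "equal to zero" form, matching the BC1EQZ/BC1NEZ pair. The result is
 * truncated into 'bcond' and the delay-slot size (a 16-bit slot occurs
 * in the compressed encodings) is recorded with MIPS_HFLAG_BDS16 or
 * MIPS_HFLAG_BDS32.
 */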
8905 /* Coprocessor 1 (FPU) */
8907 #define FOP(func, fmt) (((fmt) << 21) | (func))
8910 OPC_ADD_S
= FOP(0, FMT_S
),
8911 OPC_SUB_S
= FOP(1, FMT_S
),
8912 OPC_MUL_S
= FOP(2, FMT_S
),
8913 OPC_DIV_S
= FOP(3, FMT_S
),
8914 OPC_SQRT_S
= FOP(4, FMT_S
),
8915 OPC_ABS_S
= FOP(5, FMT_S
),
8916 OPC_MOV_S
= FOP(6, FMT_S
),
8917 OPC_NEG_S
= FOP(7, FMT_S
),
8918 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8919 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8920 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8921 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8922 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8923 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8924 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8925 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8926 OPC_SEL_S
= FOP(16, FMT_S
),
8927 OPC_MOVCF_S
= FOP(17, FMT_S
),
8928 OPC_MOVZ_S
= FOP(18, FMT_S
),
8929 OPC_MOVN_S
= FOP(19, FMT_S
),
8930 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8931 OPC_RECIP_S
= FOP(21, FMT_S
),
8932 OPC_RSQRT_S
= FOP(22, FMT_S
),
8933 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8934 OPC_MADDF_S
= FOP(24, FMT_S
),
8935 OPC_MSUBF_S
= FOP(25, FMT_S
),
8936 OPC_RINT_S
= FOP(26, FMT_S
),
8937 OPC_CLASS_S
= FOP(27, FMT_S
),
8938 OPC_MIN_S
= FOP(28, FMT_S
),
8939 OPC_RECIP2_S
= FOP(28, FMT_S
),
8940 OPC_MINA_S
= FOP(29, FMT_S
),
8941 OPC_RECIP1_S
= FOP(29, FMT_S
),
8942 OPC_MAX_S
= FOP(30, FMT_S
),
8943 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8944 OPC_MAXA_S
= FOP(31, FMT_S
),
8945 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8946 OPC_CVT_D_S
= FOP(33, FMT_S
),
8947 OPC_CVT_W_S
= FOP(36, FMT_S
),
8948 OPC_CVT_L_S
= FOP(37, FMT_S
),
8949 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8950 OPC_CMP_F_S
= FOP (48, FMT_S
),
8951 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8952 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8953 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8954 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8955 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8956 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8957 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8958 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8959 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8960 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8961 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8962 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8963 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8964 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8965 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8967 OPC_ADD_D
= FOP(0, FMT_D
),
8968 OPC_SUB_D
= FOP(1, FMT_D
),
8969 OPC_MUL_D
= FOP(2, FMT_D
),
8970 OPC_DIV_D
= FOP(3, FMT_D
),
8971 OPC_SQRT_D
= FOP(4, FMT_D
),
8972 OPC_ABS_D
= FOP(5, FMT_D
),
8973 OPC_MOV_D
= FOP(6, FMT_D
),
8974 OPC_NEG_D
= FOP(7, FMT_D
),
8975 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8976 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8977 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8978 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8979 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8980 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8981 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8982 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8983 OPC_SEL_D
= FOP(16, FMT_D
),
8984 OPC_MOVCF_D
= FOP(17, FMT_D
),
8985 OPC_MOVZ_D
= FOP(18, FMT_D
),
8986 OPC_MOVN_D
= FOP(19, FMT_D
),
8987 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8988 OPC_RECIP_D
= FOP(21, FMT_D
),
8989 OPC_RSQRT_D
= FOP(22, FMT_D
),
8990 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8991 OPC_MADDF_D
= FOP(24, FMT_D
),
8992 OPC_MSUBF_D
= FOP(25, FMT_D
),
8993 OPC_RINT_D
= FOP(26, FMT_D
),
8994 OPC_CLASS_D
= FOP(27, FMT_D
),
8995 OPC_MIN_D
= FOP(28, FMT_D
),
8996 OPC_RECIP2_D
= FOP(28, FMT_D
),
8997 OPC_MINA_D
= FOP(29, FMT_D
),
8998 OPC_RECIP1_D
= FOP(29, FMT_D
),
8999 OPC_MAX_D
= FOP(30, FMT_D
),
9000 OPC_RSQRT1_D
= FOP(30, FMT_D
),
9001 OPC_MAXA_D
= FOP(31, FMT_D
),
9002 OPC_RSQRT2_D
= FOP(31, FMT_D
),
9003 OPC_CVT_S_D
= FOP(32, FMT_D
),
9004 OPC_CVT_W_D
= FOP(36, FMT_D
),
9005 OPC_CVT_L_D
= FOP(37, FMT_D
),
9006 OPC_CMP_F_D
= FOP (48, FMT_D
),
9007 OPC_CMP_UN_D
= FOP (49, FMT_D
),
9008 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
9009 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
9010 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
9011 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
9012 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
9013 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
9014 OPC_CMP_SF_D
= FOP (56, FMT_D
),
9015 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
9016 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
9017 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
9018 OPC_CMP_LT_D
= FOP (60, FMT_D
),
9019 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
9020 OPC_CMP_LE_D
= FOP (62, FMT_D
),
9021 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
9023 OPC_CVT_S_W
= FOP(32, FMT_W
),
9024 OPC_CVT_D_W
= FOP(33, FMT_W
),
9025 OPC_CVT_S_L
= FOP(32, FMT_L
),
9026 OPC_CVT_D_L
= FOP(33, FMT_L
),
9027 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
9029 OPC_ADD_PS
= FOP(0, FMT_PS
),
9030 OPC_SUB_PS
= FOP(1, FMT_PS
),
9031 OPC_MUL_PS
= FOP(2, FMT_PS
),
9032 OPC_DIV_PS
= FOP(3, FMT_PS
),
9033 OPC_ABS_PS
= FOP(5, FMT_PS
),
9034 OPC_MOV_PS
= FOP(6, FMT_PS
),
9035 OPC_NEG_PS
= FOP(7, FMT_PS
),
9036 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
9037 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
9038 OPC_MOVN_PS
= FOP(19, FMT_PS
),
9039 OPC_ADDR_PS
= FOP(24, FMT_PS
),
9040 OPC_MULR_PS
= FOP(26, FMT_PS
),
9041 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
9042 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
9043 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
9044 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
9046 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
9047 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
9048 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
9049 OPC_PLL_PS
= FOP(44, FMT_PS
),
9050 OPC_PLU_PS
= FOP(45, FMT_PS
),
9051 OPC_PUL_PS
= FOP(46, FMT_PS
),
9052 OPC_PUU_PS
= FOP(47, FMT_PS
),
9053 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
9054 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
9055 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
9056 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
9057 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
9058 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
9059 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
9060 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
9061 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
9062 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
9063 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
9064 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
9065 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
9066 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
9067 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
9068 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
9072 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
9073 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
9074 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
9075 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
9076 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
9077 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
9078 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
9079 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
9080 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
9081 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
9082 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
9083 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
9084 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
9085 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
9086 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
9087 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
9088 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
9089 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
9090 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
9091 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
9092 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
9093 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
9095 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
9096 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
9097 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
9098 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
9099 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
9100 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
9101 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
9102 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
9103 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
9104 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
9105 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
9106 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
9107 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
9108 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
9109 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
9110 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
9111 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
9112 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
9113 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
9114 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
9115 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
9116 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
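/*
 * Worked example of the FOP() packing, assuming the usual MIPS fmt
 * field values defined earlier in this file (FMT_S == 16, FMT_D == 17,
 * FMT_W == 20, FMT_L == 21, FMT_PS == 22):
 *
 *   OPC_ADD_D       = FOP(0, FMT_D) = (17 << 21) | 0 = 0x02200000
 *   R6_OPC_CMP_EQ_S = FOP(2, FMT_W) = (20 << 21) | 2 = 0x02800002
 *
 * i.e. the fmt field occupies bits 25:21 of the COP1 opcode word and
 * the function code sits in the low 6 bits, which is exactly how
 * gen_farith() masks the instruction (ctx->opcode & 0x3f for 'func').
 */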
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        TCGv_i32 fs_tmp = tcg_const_i32(fs);
        gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
        tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->base.is_jmp = DISAS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
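/*
 * gen_cp1() above covers the GPR<->FPU move group: word moves extend or
 * truncate through a 32-bit temporary, the cfc1/ctc1 helpers handle the
 * FP control registers, and the TARGET_MIPS64-only paths add the
 * doubleword and high-half (fpr32h) forms. Writing a control register
 * via ctc1 may change the FP-related hflags, so that path ends the
 * block with DISAS_STOP.
 */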
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
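/*
 * The movcf helpers all follow the same shape: test the chosen fcr31
 * condition bit, branch over the copy when the bit does not match the
 * requested true/false sense (the brcondi condition is derived from
 * 'tf'), and otherwise copy fs to fd (or rs to rd for the GPR MOVCI
 * form). The paired-single variant repeats the test with cc and cc+1
 * so the low and high halves are selected independently.
 */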
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
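/*
 * gen_sel_s/gen_sel_d implement the R6 select operations with
 * branchless tcg_gen_movcond: the SEL.fmt form keys off bit 0 of the
 * old destination fd (choosing between ft and fs), while the
 * SELEQZ/SELNEZ forms key off bit 0 of ft and produce either fs or
 * zero. Only the low bit matters, hence the andi with 1 before each
 * movcond.
 */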
9371 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
9372 int ft
, int fs
, int fd
, int cc
)
9374 uint32_t func
= ctx
->opcode
& 0x3f;
9378 TCGv_i32 fp0
= tcg_temp_new_i32();
9379 TCGv_i32 fp1
= tcg_temp_new_i32();
9381 gen_load_fpr32(ctx
, fp0
, fs
);
9382 gen_load_fpr32(ctx
, fp1
, ft
);
9383 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
9384 tcg_temp_free_i32(fp1
);
9385 gen_store_fpr32(ctx
, fp0
, fd
);
9386 tcg_temp_free_i32(fp0
);
9391 TCGv_i32 fp0
= tcg_temp_new_i32();
9392 TCGv_i32 fp1
= tcg_temp_new_i32();
9394 gen_load_fpr32(ctx
, fp0
, fs
);
9395 gen_load_fpr32(ctx
, fp1
, ft
);
9396 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9397 tcg_temp_free_i32(fp1
);
9398 gen_store_fpr32(ctx
, fp0
, fd
);
9399 tcg_temp_free_i32(fp0
);
9404 TCGv_i32 fp0
= tcg_temp_new_i32();
9405 TCGv_i32 fp1
= tcg_temp_new_i32();
9407 gen_load_fpr32(ctx
, fp0
, fs
);
9408 gen_load_fpr32(ctx
, fp1
, ft
);
9409 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9410 tcg_temp_free_i32(fp1
);
9411 gen_store_fpr32(ctx
, fp0
, fd
);
9412 tcg_temp_free_i32(fp0
);
9417 TCGv_i32 fp0
= tcg_temp_new_i32();
9418 TCGv_i32 fp1
= tcg_temp_new_i32();
9420 gen_load_fpr32(ctx
, fp0
, fs
);
9421 gen_load_fpr32(ctx
, fp1
, ft
);
9422 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9423 tcg_temp_free_i32(fp1
);
9424 gen_store_fpr32(ctx
, fp0
, fd
);
9425 tcg_temp_free_i32(fp0
);
9430 TCGv_i32 fp0
= tcg_temp_new_i32();
9432 gen_load_fpr32(ctx
, fp0
, fs
);
9433 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9434 gen_store_fpr32(ctx
, fp0
, fd
);
9435 tcg_temp_free_i32(fp0
);
9440 TCGv_i32 fp0
= tcg_temp_new_i32();
9442 gen_load_fpr32(ctx
, fp0
, fs
);
9444 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9446 gen_helper_float_abs_s(fp0
, fp0
);
9448 gen_store_fpr32(ctx
, fp0
, fd
);
9449 tcg_temp_free_i32(fp0
);
9454 TCGv_i32 fp0
= tcg_temp_new_i32();
9456 gen_load_fpr32(ctx
, fp0
, fs
);
9457 gen_store_fpr32(ctx
, fp0
, fd
);
9458 tcg_temp_free_i32(fp0
);
9463 TCGv_i32 fp0
= tcg_temp_new_i32();
9465 gen_load_fpr32(ctx
, fp0
, fs
);
9467 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9469 gen_helper_float_chs_s(fp0
, fp0
);
9471 gen_store_fpr32(ctx
, fp0
, fd
);
9472 tcg_temp_free_i32(fp0
);
9476 check_cp1_64bitmode(ctx
);
9478 TCGv_i32 fp32
= tcg_temp_new_i32();
9479 TCGv_i64 fp64
= tcg_temp_new_i64();
9481 gen_load_fpr32(ctx
, fp32
, fs
);
9483 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9485 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9487 tcg_temp_free_i32(fp32
);
9488 gen_store_fpr64(ctx
, fp64
, fd
);
9489 tcg_temp_free_i64(fp64
);
9493 check_cp1_64bitmode(ctx
);
9495 TCGv_i32 fp32
= tcg_temp_new_i32();
9496 TCGv_i64 fp64
= tcg_temp_new_i64();
9498 gen_load_fpr32(ctx
, fp32
, fs
);
9500 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9502 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9504 tcg_temp_free_i32(fp32
);
9505 gen_store_fpr64(ctx
, fp64
, fd
);
9506 tcg_temp_free_i64(fp64
);
9510 check_cp1_64bitmode(ctx
);
9512 TCGv_i32 fp32
= tcg_temp_new_i32();
9513 TCGv_i64 fp64
= tcg_temp_new_i64();
9515 gen_load_fpr32(ctx
, fp32
, fs
);
9517 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9519 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9521 tcg_temp_free_i32(fp32
);
9522 gen_store_fpr64(ctx
, fp64
, fd
);
9523 tcg_temp_free_i64(fp64
);
9527 check_cp1_64bitmode(ctx
);
9529 TCGv_i32 fp32
= tcg_temp_new_i32();
9530 TCGv_i64 fp64
= tcg_temp_new_i64();
9532 gen_load_fpr32(ctx
, fp32
, fs
);
9534 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9536 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9538 tcg_temp_free_i32(fp32
);
9539 gen_store_fpr64(ctx
, fp64
, fd
);
9540 tcg_temp_free_i64(fp64
);
9545 TCGv_i32 fp0
= tcg_temp_new_i32();
9547 gen_load_fpr32(ctx
, fp0
, fs
);
9549 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9551 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9553 gen_store_fpr32(ctx
, fp0
, fd
);
9554 tcg_temp_free_i32(fp0
);
9559 TCGv_i32 fp0
= tcg_temp_new_i32();
9561 gen_load_fpr32(ctx
, fp0
, fs
);
9563 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9565 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9567 gen_store_fpr32(ctx
, fp0
, fd
);
9568 tcg_temp_free_i32(fp0
);
9573 TCGv_i32 fp0
= tcg_temp_new_i32();
9575 gen_load_fpr32(ctx
, fp0
, fs
);
9577 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9579 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9581 gen_store_fpr32(ctx
, fp0
, fd
);
9582 tcg_temp_free_i32(fp0
);
9587 TCGv_i32 fp0
= tcg_temp_new_i32();
9589 gen_load_fpr32(ctx
, fp0
, fs
);
9591 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9593 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9595 gen_store_fpr32(ctx
, fp0
, fd
);
9596 tcg_temp_free_i32(fp0
);
9600 check_insn(ctx
, ISA_MIPS32R6
);
9601 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9604 check_insn(ctx
, ISA_MIPS32R6
);
9605 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9608 check_insn(ctx
, ISA_MIPS32R6
);
9609 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9612 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9613 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9616 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9618 TCGLabel
*l1
= gen_new_label();
9622 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9624 fp0
= tcg_temp_new_i32();
9625 gen_load_fpr32(ctx
, fp0
, fs
);
9626 gen_store_fpr32(ctx
, fp0
, fd
);
9627 tcg_temp_free_i32(fp0
);
9632 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9634 TCGLabel
*l1
= gen_new_label();
9638 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9639 fp0
= tcg_temp_new_i32();
9640 gen_load_fpr32(ctx
, fp0
, fs
);
9641 gen_store_fpr32(ctx
, fp0
, fd
);
9642 tcg_temp_free_i32(fp0
);
9649 TCGv_i32 fp0
= tcg_temp_new_i32();
9651 gen_load_fpr32(ctx
, fp0
, fs
);
9652 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9653 gen_store_fpr32(ctx
, fp0
, fd
);
9654 tcg_temp_free_i32(fp0
);
9659 TCGv_i32 fp0
= tcg_temp_new_i32();
9661 gen_load_fpr32(ctx
, fp0
, fs
);
9662 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9663 gen_store_fpr32(ctx
, fp0
, fd
);
9664 tcg_temp_free_i32(fp0
);
9668 check_insn(ctx
, ISA_MIPS32R6
);
9670 TCGv_i32 fp0
= tcg_temp_new_i32();
9671 TCGv_i32 fp1
= tcg_temp_new_i32();
9672 TCGv_i32 fp2
= tcg_temp_new_i32();
9673 gen_load_fpr32(ctx
, fp0
, fs
);
9674 gen_load_fpr32(ctx
, fp1
, ft
);
9675 gen_load_fpr32(ctx
, fp2
, fd
);
9676 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9677 gen_store_fpr32(ctx
, fp2
, fd
);
9678 tcg_temp_free_i32(fp2
);
9679 tcg_temp_free_i32(fp1
);
9680 tcg_temp_free_i32(fp0
);
9684 check_insn(ctx
, ISA_MIPS32R6
);
9686 TCGv_i32 fp0
= tcg_temp_new_i32();
9687 TCGv_i32 fp1
= tcg_temp_new_i32();
9688 TCGv_i32 fp2
= tcg_temp_new_i32();
9689 gen_load_fpr32(ctx
, fp0
, fs
);
9690 gen_load_fpr32(ctx
, fp1
, ft
);
9691 gen_load_fpr32(ctx
, fp2
, fd
);
9692 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9693 gen_store_fpr32(ctx
, fp2
, fd
);
9694 tcg_temp_free_i32(fp2
);
9695 tcg_temp_free_i32(fp1
);
9696 tcg_temp_free_i32(fp0
);
9700 check_insn(ctx
, ISA_MIPS32R6
);
9702 TCGv_i32 fp0
= tcg_temp_new_i32();
9703 gen_load_fpr32(ctx
, fp0
, fs
);
9704 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9705 gen_store_fpr32(ctx
, fp0
, fd
);
9706 tcg_temp_free_i32(fp0
);
9710 check_insn(ctx
, ISA_MIPS32R6
);
9712 TCGv_i32 fp0
= tcg_temp_new_i32();
9713 gen_load_fpr32(ctx
, fp0
, fs
);
9714 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9715 gen_store_fpr32(ctx
, fp0
, fd
);
9716 tcg_temp_free_i32(fp0
);
9719 case OPC_MIN_S
: /* OPC_RECIP2_S */
9720 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9722 TCGv_i32 fp0
= tcg_temp_new_i32();
9723 TCGv_i32 fp1
= tcg_temp_new_i32();
9724 TCGv_i32 fp2
= tcg_temp_new_i32();
9725 gen_load_fpr32(ctx
, fp0
, fs
);
9726 gen_load_fpr32(ctx
, fp1
, ft
);
9727 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9728 gen_store_fpr32(ctx
, fp2
, fd
);
9729 tcg_temp_free_i32(fp2
);
9730 tcg_temp_free_i32(fp1
);
9731 tcg_temp_free_i32(fp0
);
9734 check_cp1_64bitmode(ctx
);
9736 TCGv_i32 fp0
= tcg_temp_new_i32();
9737 TCGv_i32 fp1
= tcg_temp_new_i32();
9739 gen_load_fpr32(ctx
, fp0
, fs
);
9740 gen_load_fpr32(ctx
, fp1
, ft
);
9741 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9742 tcg_temp_free_i32(fp1
);
9743 gen_store_fpr32(ctx
, fp0
, fd
);
9744 tcg_temp_free_i32(fp0
);
9748 case OPC_MINA_S
: /* OPC_RECIP1_S */
9749 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9751 TCGv_i32 fp0
= tcg_temp_new_i32();
9752 TCGv_i32 fp1
= tcg_temp_new_i32();
9753 TCGv_i32 fp2
= tcg_temp_new_i32();
9754 gen_load_fpr32(ctx
, fp0
, fs
);
9755 gen_load_fpr32(ctx
, fp1
, ft
);
9756 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9757 gen_store_fpr32(ctx
, fp2
, fd
);
9758 tcg_temp_free_i32(fp2
);
9759 tcg_temp_free_i32(fp1
);
9760 tcg_temp_free_i32(fp0
);
9763 check_cp1_64bitmode(ctx
);
9765 TCGv_i32 fp0
= tcg_temp_new_i32();
9767 gen_load_fpr32(ctx
, fp0
, fs
);
9768 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9769 gen_store_fpr32(ctx
, fp0
, fd
);
9770 tcg_temp_free_i32(fp0
);
9774 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9775 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9777 TCGv_i32 fp0
= tcg_temp_new_i32();
9778 TCGv_i32 fp1
= tcg_temp_new_i32();
9779 gen_load_fpr32(ctx
, fp0
, fs
);
9780 gen_load_fpr32(ctx
, fp1
, ft
);
9781 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9782 gen_store_fpr32(ctx
, fp1
, fd
);
9783 tcg_temp_free_i32(fp1
);
9784 tcg_temp_free_i32(fp0
);
9787 check_cp1_64bitmode(ctx
);
9789 TCGv_i32 fp0
= tcg_temp_new_i32();
9791 gen_load_fpr32(ctx
, fp0
, fs
);
9792 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9793 gen_store_fpr32(ctx
, fp0
, fd
);
9794 tcg_temp_free_i32(fp0
);
9798 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9799 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9801 TCGv_i32 fp0
= tcg_temp_new_i32();
9802 TCGv_i32 fp1
= tcg_temp_new_i32();
9803 gen_load_fpr32(ctx
, fp0
, fs
);
9804 gen_load_fpr32(ctx
, fp1
, ft
);
9805 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9806 gen_store_fpr32(ctx
, fp1
, fd
);
9807 tcg_temp_free_i32(fp1
);
9808 tcg_temp_free_i32(fp0
);
9811 check_cp1_64bitmode(ctx
);
9813 TCGv_i32 fp0
= tcg_temp_new_i32();
9814 TCGv_i32 fp1
= tcg_temp_new_i32();
9816 gen_load_fpr32(ctx
, fp0
, fs
);
9817 gen_load_fpr32(ctx
, fp1
, ft
);
9818 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9819 tcg_temp_free_i32(fp1
);
9820 gen_store_fpr32(ctx
, fp0
, fd
);
9821 tcg_temp_free_i32(fp0
);
9826 check_cp1_registers(ctx
, fd
);
9828 TCGv_i32 fp32
= tcg_temp_new_i32();
9829 TCGv_i64 fp64
= tcg_temp_new_i64();
9831 gen_load_fpr32(ctx
, fp32
, fs
);
9832 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9833 tcg_temp_free_i32(fp32
);
9834 gen_store_fpr64(ctx
, fp64
, fd
);
9835 tcg_temp_free_i64(fp64
);
9840 TCGv_i32 fp0
= tcg_temp_new_i32();
9842 gen_load_fpr32(ctx
, fp0
, fs
);
9844 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9846 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9848 gen_store_fpr32(ctx
, fp0
, fd
);
9849 tcg_temp_free_i32(fp0
);
9853 check_cp1_64bitmode(ctx
);
9855 TCGv_i32 fp32
= tcg_temp_new_i32();
9856 TCGv_i64 fp64
= tcg_temp_new_i64();
9858 gen_load_fpr32(ctx
, fp32
, fs
);
9860 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9862 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9864 tcg_temp_free_i32(fp32
);
9865 gen_store_fpr64(ctx
, fp64
, fd
);
9866 tcg_temp_free_i64(fp64
);
9872 TCGv_i64 fp64
= tcg_temp_new_i64();
9873 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9874 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9876 gen_load_fpr32(ctx
, fp32_0
, fs
);
9877 gen_load_fpr32(ctx
, fp32_1
, ft
);
9878 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9879 tcg_temp_free_i32(fp32_1
);
9880 tcg_temp_free_i32(fp32_0
);
9881 gen_store_fpr64(ctx
, fp64
, fd
);
9882 tcg_temp_free_i64(fp64
);
9894 case OPC_CMP_NGLE_S
:
9901 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9902 if (ctx
->opcode
& (1 << 6)) {
9903 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9905 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9909 check_cp1_registers(ctx
, fs
| ft
| fd
);
9911 TCGv_i64 fp0
= tcg_temp_new_i64();
9912 TCGv_i64 fp1
= tcg_temp_new_i64();
9914 gen_load_fpr64(ctx
, fp0
, fs
);
9915 gen_load_fpr64(ctx
, fp1
, ft
);
9916 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9917 tcg_temp_free_i64(fp1
);
9918 gen_store_fpr64(ctx
, fp0
, fd
);
9919 tcg_temp_free_i64(fp0
);
9923 check_cp1_registers(ctx
, fs
| ft
| fd
);
9925 TCGv_i64 fp0
= tcg_temp_new_i64();
9926 TCGv_i64 fp1
= tcg_temp_new_i64();
9928 gen_load_fpr64(ctx
, fp0
, fs
);
9929 gen_load_fpr64(ctx
, fp1
, ft
);
9930 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9931 tcg_temp_free_i64(fp1
);
9932 gen_store_fpr64(ctx
, fp0
, fd
);
9933 tcg_temp_free_i64(fp0
);
9937 check_cp1_registers(ctx
, fs
| ft
| fd
);
9939 TCGv_i64 fp0
= tcg_temp_new_i64();
9940 TCGv_i64 fp1
= tcg_temp_new_i64();
9942 gen_load_fpr64(ctx
, fp0
, fs
);
9943 gen_load_fpr64(ctx
, fp1
, ft
);
9944 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9945 tcg_temp_free_i64(fp1
);
9946 gen_store_fpr64(ctx
, fp0
, fd
);
9947 tcg_temp_free_i64(fp0
);
9951 check_cp1_registers(ctx
, fs
| ft
| fd
);
9953 TCGv_i64 fp0
= tcg_temp_new_i64();
9954 TCGv_i64 fp1
= tcg_temp_new_i64();
9956 gen_load_fpr64(ctx
, fp0
, fs
);
9957 gen_load_fpr64(ctx
, fp1
, ft
);
9958 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9959 tcg_temp_free_i64(fp1
);
9960 gen_store_fpr64(ctx
, fp0
, fd
);
9961 tcg_temp_free_i64(fp0
);
9965 check_cp1_registers(ctx
, fs
| fd
);
9967 TCGv_i64 fp0
= tcg_temp_new_i64();
9969 gen_load_fpr64(ctx
, fp0
, fs
);
9970 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9971 gen_store_fpr64(ctx
, fp0
, fd
);
9972 tcg_temp_free_i64(fp0
);
9976 check_cp1_registers(ctx
, fs
| fd
);
9978 TCGv_i64 fp0
= tcg_temp_new_i64();
9980 gen_load_fpr64(ctx
, fp0
, fs
);
9982 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9984 gen_helper_float_abs_d(fp0
, fp0
);
9986 gen_store_fpr64(ctx
, fp0
, fd
);
9987 tcg_temp_free_i64(fp0
);
9991 check_cp1_registers(ctx
, fs
| fd
);
9993 TCGv_i64 fp0
= tcg_temp_new_i64();
9995 gen_load_fpr64(ctx
, fp0
, fs
);
9996 gen_store_fpr64(ctx
, fp0
, fd
);
9997 tcg_temp_free_i64(fp0
);
10001 check_cp1_registers(ctx
, fs
| fd
);
10003 TCGv_i64 fp0
= tcg_temp_new_i64();
10005 gen_load_fpr64(ctx
, fp0
, fs
);
10006 if (ctx
->abs2008
) {
10007 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10009 gen_helper_float_chs_d(fp0
, fp0
);
10011 gen_store_fpr64(ctx
, fp0
, fd
);
10012 tcg_temp_free_i64(fp0
);
10015 case OPC_ROUND_L_D
:
10016 check_cp1_64bitmode(ctx
);
10018 TCGv_i64 fp0
= tcg_temp_new_i64();
10020 gen_load_fpr64(ctx
, fp0
, fs
);
10021 if (ctx
->nan2008
) {
10022 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10024 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10026 gen_store_fpr64(ctx
, fp0
, fd
);
10027 tcg_temp_free_i64(fp0
);
10030 case OPC_TRUNC_L_D
:
10031 check_cp1_64bitmode(ctx
);
10033 TCGv_i64 fp0
= tcg_temp_new_i64();
10035 gen_load_fpr64(ctx
, fp0
, fs
);
10036 if (ctx
->nan2008
) {
10037 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10039 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10041 gen_store_fpr64(ctx
, fp0
, fd
);
10042 tcg_temp_free_i64(fp0
);
10046 check_cp1_64bitmode(ctx
);
10048 TCGv_i64 fp0
= tcg_temp_new_i64();
10050 gen_load_fpr64(ctx
, fp0
, fs
);
10051 if (ctx
->nan2008
) {
10052 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10054 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10056 gen_store_fpr64(ctx
, fp0
, fd
);
10057 tcg_temp_free_i64(fp0
);
10060 case OPC_FLOOR_L_D
:
10061 check_cp1_64bitmode(ctx
);
10063 TCGv_i64 fp0
= tcg_temp_new_i64();
10065 gen_load_fpr64(ctx
, fp0
, fs
);
10066 if (ctx
->nan2008
) {
10067 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10069 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10071 gen_store_fpr64(ctx
, fp0
, fd
);
10072 tcg_temp_free_i64(fp0
);
10075 case OPC_ROUND_W_D
:
10076 check_cp1_registers(ctx
, fs
);
10078 TCGv_i32 fp32
= tcg_temp_new_i32();
10079 TCGv_i64 fp64
= tcg_temp_new_i64();
10081 gen_load_fpr64(ctx
, fp64
, fs
);
10082 if (ctx
->nan2008
) {
10083 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10085 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10087 tcg_temp_free_i64(fp64
);
10088 gen_store_fpr32(ctx
, fp32
, fd
);
10089 tcg_temp_free_i32(fp32
);
10092 case OPC_TRUNC_W_D
:
10093 check_cp1_registers(ctx
, fs
);
10095 TCGv_i32 fp32
= tcg_temp_new_i32();
10096 TCGv_i64 fp64
= tcg_temp_new_i64();
10098 gen_load_fpr64(ctx
, fp64
, fs
);
10099 if (ctx
->nan2008
) {
10100 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10102 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10104 tcg_temp_free_i64(fp64
);
10105 gen_store_fpr32(ctx
, fp32
, fd
);
10106 tcg_temp_free_i32(fp32
);
10110 check_cp1_registers(ctx
, fs
);
10112 TCGv_i32 fp32
= tcg_temp_new_i32();
10113 TCGv_i64 fp64
= tcg_temp_new_i64();
10115 gen_load_fpr64(ctx
, fp64
, fs
);
10116 if (ctx
->nan2008
) {
10117 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10119 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10121 tcg_temp_free_i64(fp64
);
10122 gen_store_fpr32(ctx
, fp32
, fd
);
10123 tcg_temp_free_i32(fp32
);
10126 case OPC_FLOOR_W_D
:
10127 check_cp1_registers(ctx
, fs
);
10129 TCGv_i32 fp32
= tcg_temp_new_i32();
10130 TCGv_i64 fp64
= tcg_temp_new_i64();
10132 gen_load_fpr64(ctx
, fp64
, fs
);
10133 if (ctx
->nan2008
) {
10134 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10136 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10138 tcg_temp_free_i64(fp64
);
10139 gen_store_fpr32(ctx
, fp32
, fd
);
10140 tcg_temp_free_i32(fp32
);
10144 check_insn(ctx
, ISA_MIPS32R6
);
10145 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10148 check_insn(ctx
, ISA_MIPS32R6
);
10149 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10152 check_insn(ctx
, ISA_MIPS32R6
);
10153 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10156 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10157 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10160 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10162 TCGLabel
*l1
= gen_new_label();
10166 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10168 fp0
= tcg_temp_new_i64();
10169 gen_load_fpr64(ctx
, fp0
, fs
);
10170 gen_store_fpr64(ctx
, fp0
, fd
);
10171 tcg_temp_free_i64(fp0
);
10176 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
10178 TCGLabel
*l1
= gen_new_label();
10182 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10183 fp0
= tcg_temp_new_i64();
10184 gen_load_fpr64(ctx
, fp0
, fs
);
10185 gen_store_fpr64(ctx
, fp0
, fd
);
10186 tcg_temp_free_i64(fp0
);
10192 check_cp1_registers(ctx
, fs
| fd
);
10194 TCGv_i64 fp0
= tcg_temp_new_i64();
10196 gen_load_fpr64(ctx
, fp0
, fs
);
10197 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
10198 gen_store_fpr64(ctx
, fp0
, fd
);
10199 tcg_temp_free_i64(fp0
);
10203 check_cp1_registers(ctx
, fs
| fd
);
10205 TCGv_i64 fp0
= tcg_temp_new_i64();
10207 gen_load_fpr64(ctx
, fp0
, fs
);
10208 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
10209 gen_store_fpr64(ctx
, fp0
, fd
);
10210 tcg_temp_free_i64(fp0
);
10214 check_insn(ctx
, ISA_MIPS32R6
);
10216 TCGv_i64 fp0
= tcg_temp_new_i64();
10217 TCGv_i64 fp1
= tcg_temp_new_i64();
10218 TCGv_i64 fp2
= tcg_temp_new_i64();
10219 gen_load_fpr64(ctx
, fp0
, fs
);
10220 gen_load_fpr64(ctx
, fp1
, ft
);
10221 gen_load_fpr64(ctx
, fp2
, fd
);
10222 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10223 gen_store_fpr64(ctx
, fp2
, fd
);
10224 tcg_temp_free_i64(fp2
);
10225 tcg_temp_free_i64(fp1
);
10226 tcg_temp_free_i64(fp0
);
10230 check_insn(ctx
, ISA_MIPS32R6
);
10232 TCGv_i64 fp0
= tcg_temp_new_i64();
10233 TCGv_i64 fp1
= tcg_temp_new_i64();
10234 TCGv_i64 fp2
= tcg_temp_new_i64();
10235 gen_load_fpr64(ctx
, fp0
, fs
);
10236 gen_load_fpr64(ctx
, fp1
, ft
);
10237 gen_load_fpr64(ctx
, fp2
, fd
);
10238 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10239 gen_store_fpr64(ctx
, fp2
, fd
);
10240 tcg_temp_free_i64(fp2
);
10241 tcg_temp_free_i64(fp1
);
10242 tcg_temp_free_i64(fp0
);
10246 check_insn(ctx
, ISA_MIPS32R6
);
10248 TCGv_i64 fp0
= tcg_temp_new_i64();
10249 gen_load_fpr64(ctx
, fp0
, fs
);
10250 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
10251 gen_store_fpr64(ctx
, fp0
, fd
);
10252 tcg_temp_free_i64(fp0
);
10256 check_insn(ctx
, ISA_MIPS32R6
);
10258 TCGv_i64 fp0
= tcg_temp_new_i64();
10259 gen_load_fpr64(ctx
, fp0
, fs
);
10260 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
10261 gen_store_fpr64(ctx
, fp0
, fd
);
10262 tcg_temp_free_i64(fp0
);
case OPC_MIN_D: /* OPC_RECIP2_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_min_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_recip2_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_MINA_D: /* OPC_RECIP1_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_mina_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip1_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_MAX_D: /* OPC_RSQRT1_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_max_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt1_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_MAXA_D: /* OPC_RSQRT2_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_maxa_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_rsqrt2_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_CMP_UEQ_D:
case OPC_CMP_OLT_D:
case OPC_CMP_ULT_D:
case OPC_CMP_OLE_D:
case OPC_CMP_ULE_D:
case OPC_CMP_NGLE_D:
case OPC_CMP_SEQ_D:
case OPC_CMP_NGL_D:
case OPC_CMP_NGE_D:
case OPC_CMP_NGT_D:
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_d(ctx, func - 48, ft, fs, cc);
        gen_cmp_d(ctx, func - 48, ft, fs, cc);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    if (ctx->nan2008) {
        gen_helper_float_cvt_2008_w_d(fp32, cpu_env, fp64);
        gen_helper_float_cvt_w_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    if (ctx->nan2008) {
        gen_helper_float_cvt_2008_l_d(fp0, cpu_env, fp0);
        gen_helper_float_cvt_l_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_cvts_w(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fd);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_cvtd_w(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_l(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtd_l(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_CVT_PS_PW:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtps_pw(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_add_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_sub_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_mul_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_abs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_chs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    gen_movcf_ps(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_addr_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_mulr_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_RECIP2_PS:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_recip2_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_RECIP1_PS:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip1_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_RSQRT1_PS:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt1_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
case OPC_RSQRT2_PS:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_rsqrt2_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_helper_float_cvts_pu(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
case OPC_CVT_PW_PS:
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtpw_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_cvts_pl(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_store_fpr32h(ctx, fp0, fd);
    gen_store_fpr32(ctx, fp1, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32h(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_load_fpr32h(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
case OPC_CMP_UN_PS:
case OPC_CMP_EQ_PS:
case OPC_CMP_UEQ_PS:
case OPC_CMP_OLT_PS:
case OPC_CMP_ULT_PS:
case OPC_CMP_OLE_PS:
case OPC_CMP_ULE_PS:
case OPC_CMP_SF_PS:
case OPC_CMP_NGLE_PS:
case OPC_CMP_SEQ_PS:
case OPC_CMP_NGL_PS:
case OPC_CMP_LT_PS:
case OPC_CMP_NGE_PS:
case OPC_CMP_LE_PS:
case OPC_CMP_NGT_PS:
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_ps(ctx, func - 48, ft, fs, cc);
        gen_cmp_ps(ctx, func - 48, ft, fs, cc);
    MIPS_INVAL("farith");
    generate_exception_end(ctx, EXCP_RI);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();
    gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fs);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);
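/* Three-operand FPU arithmetic: each case below loads up to three FPU
   source registers, calls the matching float_* helper (madd/msub/nmadd/
   nmsub in single, double and paired-single formats), and writes the
   result back to fd. */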
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    TCGv t0 = tcg_temp_local_new();
    TCGv_i32 fp = tcg_temp_new_i32();
    TCGv_i32 fph = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    gen_load_gpr(t0, fr);
    tcg_gen_andi_tl(t0, t0, 0x7);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_load_fpr32(ctx, fp, fs);
    gen_load_fpr32h(ctx, fph, fs);
    gen_store_fpr32(ctx, fp, fd);
    gen_store_fpr32h(ctx, fph, fd);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
#ifdef TARGET_WORDS_BIGENDIAN
    gen_load_fpr32(ctx, fp, fs);
    gen_load_fpr32h(ctx, fph, ft);
    gen_store_fpr32h(ctx, fp, fd);
    gen_store_fpr32(ctx, fph, fd);
    gen_load_fpr32h(ctx, fph, fs);
    gen_load_fpr32(ctx, fp, ft);
    gen_store_fpr32(ctx, fph, fd);
    gen_store_fpr32h(ctx, fp, fd);
    tcg_temp_free_i32(fp);
    tcg_temp_free_i32(fph);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    MIPS_INVAL("flt3_arith");
    generate_exception_end(ctx, EXCP_RI);
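/* RDHWR: read a hardware register (CPU number, SYNCI step, cycle counter,
   counter resolution, performance counters, XNP, UserLocal) into a GPR,
   raising EXCP_RI for registers that are not accessible. */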
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();
    gen_helper_rdhwr_cpunum(t0, cpu_env);
    gen_store_gpr(t0, rt);
    gen_helper_rdhwr_synci_step(t0, cpu_env);
    gen_store_gpr(t0, rt);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
    gen_helper_rdhwr_cc(t0, cpu_env);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
    gen_store_gpr(t0, rt);
    /* Break the TB to be able to take timer interrupts immediately
       after reading count. DISAS_STOP isn't sufficient, we need to ensure
       we break completely out of translated code. */
    gen_save_pc(ctx->base.pc_next + 4);
    ctx->base.is_jmp = DISAS_EXIT;
    gen_helper_rdhwr_ccres(t0, cpu_env);
    gen_store_gpr(t0, rt);
    check_insn(ctx, ISA_MIPS32R6);
    /* Performance counter registers are not implemented other than
     * control register 0.
    generate_exception(ctx, EXCP_RI);
    gen_helper_rdhwr_performance(t0, cpu_env);
    gen_store_gpr(t0, rt);
    check_insn(ctx, ISA_MIPS32R6);
    gen_helper_rdhwr_xnp(t0, cpu_env);
    gen_store_gpr(t0, rt);
#if defined(CONFIG_USER_ONLY)
    tcg_gen_ld_tl(t0, cpu_env,
                  offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
    gen_store_gpr(t0, rt);
    if ((ctx->hflags & MIPS_HFLAG_CP0) ||
        (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
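/* Clear the branch-related hflags both in the DisasContext and in the
   generated code; the TCG copy is cleared unconditionally because the
   delay-slot instruction may change hflags at run time. */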
static inline void clear_branch_hflags(DisasContext *ctx)
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->base.is_jmp == DISAS_NEXT) {
        save_cpu_state(ctx, 0);
    /* it is not safe to save ctx->hflags as hflags may be changed
       in execution time by the instruction in delay / forbidden slot. */
    tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
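/* Emit the code that completes a pending branch: depending on the branch
   type recorded in the hflags this either steps past a forbidden slot,
   jumps to a static target, emits a conditional goto_tb pair, or jumps to
   the address in btarget (switching ISA mode if needed). */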
static void gen_branch(DisasContext *ctx, int insn_bytes)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->base.is_jmp = DISAS_NORETURN;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BL:
            /* branch-likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            TCGLabel *l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
            gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
            gen_goto_tb(ctx, 0, ctx->btarget);
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
                tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->base.singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            tcg_gen_lookup_and_goto_ptr();
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
        generate_exception_end(ctx, EXCP_RI);
    /* Load needed operands and calculate btarget */
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_BEQZC, OPC_BNEZC */
        gen_load_gpr(t0, rs);
        ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
        /* OPC_JIC, OPC_JIALC */
        TCGv tbase = tcg_temp_new();
        TCGv toffset = tcg_temp_new();
        gen_load_gpr(tbase, rt);
        tcg_gen_movi_tl(toffset, offset);
        gen_op_addr_add(ctx, btarget, tbase, toffset);
        tcg_temp_free(tbase);
        tcg_temp_free(toffset);
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        ctx->hflags |= MIPS_HFLAG_BR;
        tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
        ctx->hflags |= MIPS_HFLAG_B;
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            /* OPC_BOVC, OPC_BNVC */
            TCGv t2 = tcg_temp_new();
            TCGv t3 = tcg_temp_new();
            TCGv t4 = tcg_temp_new();
            TCGv input_overflow = tcg_temp_new();
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            tcg_gen_ext32s_tl(t2, t0);
            tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
            tcg_gen_ext32s_tl(t3, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
            tcg_gen_or_tl(input_overflow, input_overflow, t4);
            tcg_gen_add_tl(t4, t2, t3);
            tcg_gen_ext32s_tl(t4, t4);
            tcg_gen_xor_tl(t2, t2, t3);
            tcg_gen_xor_tl(t3, t4, t3);
            tcg_gen_andc_tl(t2, t3, t2);
            tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
            tcg_gen_or_tl(t4, t4, input_overflow);
            if (opc == OPC_BOVC) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
            tcg_temp_free(input_overflow);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
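/* Map a 3-bit MIPS16 register number onto the full 32-entry register file
   (registers 16, 17 and 2..7). */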
static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
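/* MIPS16 SAVE: spill the argument registers and the requested set of
   callee-saved registers / ra to the stack and adjust sp by framesize;
   gen_mips16_restore below performs the inverse. */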
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    generate_exception_end(ctx, EXCP_RI);
    gen_base_offset_addr(ctx, t0, 29, 12);
    gen_load_gpr(t1, 7);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    gen_base_offset_addr(ctx, t0, 29, 8);
    gen_load_gpr(t1, 6);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    gen_base_offset_addr(ctx, t0, 29, 4);
    gen_load_gpr(t1, 5);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    gen_base_offset_addr(ctx, t0, 29, 0);
    gen_load_gpr(t1, 4);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \

    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);
    generate_exception_end(ctx, EXCP_RI);
#undef DECR_AND_STORE
    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                                  \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);       \
        gen_store_gpr(t1, reg);                                  \

    generate_exception_end(ctx, EXCP_RI);
#undef DECR_AND_LOAD
    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    t0 = tcg_temp_new();
    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
#if defined(TARGET_MIPS64)
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : offset << 3;
    gen_ld(ctx, OPC_LD, ry, 29, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : offset << 3;
    gen_st(ctx, OPC_SD, ry, 29, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : (ctx->opcode & 0xff) << 3;
    gen_st(ctx, OPC_SD, 31, 29, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
    gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    offset = extended ? offset : offset << 3;
    gen_ld(ctx, OPC_LDPC, ry, 0, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
    gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : offset << 2;
    gen_addiupc(ctx, ry, offset, 1, extended);
    check_insn(ctx, ISA_MIPS3);
    check_mips_64(ctx);
    offset = extended ? offset : offset << 2;
    gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
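/* Decode an EXTEND-prefixed (32-bit) MIPS16 instruction: the 16-bit extend
   word is fetched, merged into ctx->opcode, and the widened immediate is
   reassembled before dispatching on the embedded 16-bit major opcode. */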
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int extend = cpu_lduw_code(env, ctx->base.pc_next + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;
    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));
    /* The extended opcodes cleverly reuse the opcodes from their 16-bit
    case M16_OPC_ADDIUSP:
        gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, imm, 0, 1);
        gen_compute_branch(ctx, OPC_BEQ, 4, 0, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */
        gen_compute_branch(ctx, OPC_BEQ, 4, rx, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_BNEQZ:
        gen_compute_branch(ctx, OPC_BNE, 4, rx, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_SHIFT:
        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception_end(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LD, ry, rx, offset);
        imm = ctx->opcode & 0xf;
        imm = imm | ((ctx->opcode >> 20) & 0x7f) << 4;
        imm = imm | ((ctx->opcode >> 16) & 0xf) << 11;
        imm = (int16_t) (imm << 1) >> 1;
        if ((ctx->opcode >> 4) & 0x1) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
            gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
            generate_exception_end(ctx, EXCP_RI);
            gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);
    case M16_OPC_ADDIU8:
        gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);
        gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
    case M16_OPC_SLTIU:
        gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);
        gen_compute_branch(ctx, OPC_BEQ, 4, 24, 0, offset << 1, 0);
        gen_compute_branch(ctx, OPC_BNE, 4, 24, 0, offset << 1, 0);
        gen_st(ctx, OPC_SW, 31, 29, imm);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm);
        check_insn(ctx, ISA_MIPS32);
        int xsregs = (ctx->opcode >> 24) & 0x7;
        int aregs = (ctx->opcode >> 16) & 0xf;
        int do_ra = (ctx->opcode >> 6) & 0x1;
        int do_s0 = (ctx->opcode >> 5) & 0x1;
        int do_s1 = (ctx->opcode >> 4) & 0x1;
        int framesize = (((ctx->opcode >> 20) & 0xf) << 4
                         | (ctx->opcode & 0xf)) << 3;
        if (ctx->opcode & (1 << 7)) {
            gen_mips16_save(ctx, xsregs, aregs,
                            do_ra, do_s0, do_s1,
            gen_mips16_restore(ctx, xsregs, aregs,
                               do_ra, do_s0, do_s1,
        generate_exception_end(ctx, EXCP_RI);
        tcg_gen_movi_tl(cpu_gpr[rx], (uint16_t) imm);
        tcg_gen_xori_tl(cpu_gpr[24], cpu_gpr[rx], (uint16_t) imm);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, OPC_SD, ry, rx, offset);
        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset);
        gen_ld(ctx, OPC_LW, rx, 29, offset);
        gen_ld(ctx, OPC_LW, ry, rx, offset);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset);
        gen_ld(ctx, OPC_LWPC, rx, 0, offset);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LWU, ry, rx, offset);
        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset);
        gen_st(ctx, OPC_SW, rx, 29, offset);
        gen_st(ctx, OPC_SW, ry, rx, offset);
#if defined(TARGET_MIPS64)
        decode_i64_mips16(ctx, ry, funct, offset, 1);
        generate_exception_end(ctx, EXCP_RI);
static inline bool is_uhi(int sdbbp_code)
#ifdef CONFIG_USER_ONLY
    return semihosting_enabled() && sdbbp_code == 1;
static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int op, cnvt_op, op1, offset;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;
    case M16_OPC_ADDIUSP:
        int16_t imm = ((uint8_t) ctx->opcode) << 2;
        gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, ((uint8_t) ctx->opcode) << 2, 0, 0);
        offset = (ctx->opcode & 0x7ff) << 1;
        offset = (int16_t)(offset << 4) >> 4;
        gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0, offset, 0);
        /* No delay slot, so just process as a normal instruction */
        offset = cpu_lduw_code(env, ctx->base.pc_next + 2);
        offset = (((ctx->opcode & 0x1f) << 21)
                  | ((ctx->opcode >> 5) & 0x1f) << 16
        op = ((ctx->opcode >> 10) & 0x1) ? OPC_JALX : OPC_JAL;
        gen_compute_branch(ctx, op, 4, rx, ry, offset, 2);
        gen_compute_branch(ctx, OPC_BEQ, 2, rx, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_BNEQZ:
        gen_compute_branch(ctx, OPC_BNE, 2, rx, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_SHIFT:
        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception_end(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LD, ry, rx, offset << 3);
        int16_t imm = (int8_t)((ctx->opcode & 0xf) << 4) >> 4;
        if ((ctx->opcode >> 4) & 1) {
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
            generate_exception_end(ctx, EXCP_RI);
            gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);
    case M16_OPC_ADDIU8:
        int16_t imm = (int8_t) ctx->opcode;
        gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);
        int16_t imm = (uint8_t) ctx->opcode;
        gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
    case M16_OPC_SLTIU:
        int16_t imm = (uint8_t) ctx->opcode;
        gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);
        funct = (ctx->opcode >> 8) & 0x7;
        gen_compute_branch(ctx, OPC_BEQ, 2, 24, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        gen_compute_branch(ctx, OPC_BNE, 2, 24, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        gen_st(ctx, OPC_SW, 31, 29, (ctx->opcode & 0xff) << 2);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29,
                      ((int8_t)ctx->opcode) << 3);
        check_insn(ctx, ISA_MIPS32);
        int do_ra = ctx->opcode & (1 << 6);
        int do_s0 = ctx->opcode & (1 << 5);
        int do_s1 = ctx->opcode & (1 << 4);
        int framesize = ctx->opcode & 0xf;
        if (framesize == 0) {
        framesize = framesize << 3;
        if (ctx->opcode & (1 << 7)) {
            gen_mips16_save(ctx, 0, 0,
                            do_ra, do_s0, do_s1, framesize);
            gen_mips16_restore(ctx, 0, 0,
                               do_ra, do_s0, do_s1, framesize);
        int rz = xlat(ctx->opcode & 0x7);
        reg32 = (((ctx->opcode >> 3) & 0x3) << 3) |
                ((ctx->opcode >> 5) & 0x7);
        gen_arith(ctx, OPC_ADDU, reg32, rz, 0);
        reg32 = ctx->opcode & 0x1f;
        gen_arith(ctx, OPC_ADDU, ry, reg32, 0);
        generate_exception_end(ctx, EXCP_RI);
        int16_t imm = (uint8_t) ctx->opcode;
        gen_arith_imm(ctx, OPC_ADDIU, rx, 0, imm);
        int16_t imm = (uint8_t) ctx->opcode;
        gen_logic_imm(ctx, OPC_XORI, 24, rx, imm);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, OPC_SD, ry, rx, offset << 3);
        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_ld(ctx, OPC_LW, ry, rx, offset << 2);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LWPC, rx, 0, ((uint8_t)ctx->opcode) << 2);
#if defined (TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LWU, ry, rx, offset << 2);
        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset << 1);
        gen_st(ctx, OPC_SW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_st(ctx, OPC_SW, ry, rx, offset << 2);
        int rz = xlat((ctx->opcode >> 2) & 0x7);
        switch (ctx->opcode & 0x3) {
            mips32_op = OPC_ADDU;
            mips32_op = OPC_SUBU;
#if defined(TARGET_MIPS64)
            mips32_op = OPC_DADDU;
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            mips32_op = OPC_DSUBU;
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            generate_exception_end(ctx, EXCP_RI);
        gen_arith(ctx, mips32_op, rz, rx, ry);
        int nd = (ctx->opcode >> 7) & 0x1;
        int link = (ctx->opcode >> 6) & 0x1;
        int ra = (ctx->opcode >> 5) & 0x1;
        check_insn(ctx, ISA_MIPS32);
        gen_compute_branch(ctx, op, 2, ra ? 31 : rx, 31, 0,
        if (is_uhi(extract32(ctx->opcode, 5, 6))) {
            gen_helper_do_semihosting(cpu_env);
        /* XXX: not clear which exception should be raised
         * when in debug mode...
        check_insn(ctx, ISA_MIPS32);
        generate_exception_end(ctx, EXCP_DBp);
        gen_slt(ctx, OPC_SLT, 24, rx, ry);
        gen_slt(ctx, OPC_SLTU, 24, rx, ry);
        generate_exception_end(ctx, EXCP_BREAK);
        gen_shift(ctx, OPC_SLLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRLV, ry, rx, ry);
        gen_shift(ctx, OPC_SRAV, ry, rx, ry);
#if defined (TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRL, ry, ry, sa);
        gen_logic(ctx, OPC_XOR, 24, rx, ry);
        gen_arith(ctx, OPC_SUBU, rx, 0, ry);
        gen_logic(ctx, OPC_AND, rx, rx, ry);
        gen_logic(ctx, OPC_OR, rx, rx, ry);
        gen_logic(ctx, OPC_XOR, rx, rx, ry);
        gen_logic(ctx, OPC_NOR, rx, ry, 0);
        gen_HILO(ctx, OPC_MFHI, 0, rx);
        check_insn(ctx, ISA_MIPS32);
        case RR_RY_CNVT_ZEB:
            tcg_gen_ext8u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_ZEH:
            tcg_gen_ext16u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEB:
            tcg_gen_ext8s_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEH:
            tcg_gen_ext16s_tl(cpu_gpr[rx], cpu_gpr[rx]);
#if defined (TARGET_MIPS64)
        case RR_RY_CNVT_ZEW:
            check_insn(ctx, ISA_MIPS64);
            check_mips_64(ctx);
            tcg_gen_ext32u_tl(cpu_gpr[rx], cpu_gpr[rx]);
        case RR_RY_CNVT_SEW:
            check_insn(ctx, ISA_MIPS64);
            check_mips_64(ctx);
            tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            generate_exception_end(ctx, EXCP_RI);
        gen_HILO(ctx, OPC_MFLO, 0, rx);
#if defined (TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, OPC_DSRA, ry, ry, sa);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSLLV, ry, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRLV, ry, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, OPC_DSRAV, ry, rx, ry);
        gen_muldiv(ctx, OPC_MULT, 0, rx, ry);
        gen_muldiv(ctx, OPC_MULTU, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIV, 0, rx, ry);
        gen_muldiv(ctx, OPC_DIVU, 0, rx, ry);
#if defined (TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULT, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DMULTU, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIV, 0, rx, ry);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, OPC_DDIVU, 0, rx, ry);
        generate_exception_end(ctx, EXCP_RI);
    case M16_OPC_EXTEND:
        decode_extended_mips16_opc(env, ctx);
#if defined(TARGET_MIPS64)
        funct = (ctx->opcode >> 8) & 0x7;
        decode_i64_mips16(ctx, ry, funct, offset, 0);
        generate_exception_end(ctx, EXCP_RI);
/* microMIPS extension to MIPS32/MIPS64 */

 * microMIPS32/microMIPS64 major opcodes
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *    Table 6.2 microMIPS32 Encoding of Major Opcode Field
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */
    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36, /* MIPS64 */
    LD32 = 0x37, /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */
/* These opcodes are distinguished only by bits 9..6; those bits are
 * what are recorded below. */
/* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *    Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 * 2. MIPS Architecture for Programmers Volume IV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *    Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field

    /* begin of microMIPS32 DSP */
    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */
    /* end of microMIPS32 DSP */
    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */

/* POOL32C encoding of minor opcode field (bits 15..12) */

/* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */

/* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */

/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c, /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */

/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */

/* POOL16A encoding of minor opcode field */

/* POOL16B encoding of minor opcode field */

/* POOL16C encoding of minor opcode field */

/* R6 POOL16C encoding of minor opcode field (bits 0..5) */

/* POOL16D encoding of minor opcode field */

/* POOL16E encoding of minor opcode field */

static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \

/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
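/* Helpers for the 16-bit microMIPS ADDIU/ANDI variants: each one decodes
   its packed immediate (see SIMM/ZIMM above) and emits the corresponding
   32-bit operation. */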
static void gen_addiur1sp(DisasContext *ctx)
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);

static void gen_addiur2(DisasContext *ctx)
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);

static void gen_addiusp(DisasContext *ctx)
    int encoded = ZIMM(ctx->opcode, 1, 9);
    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
    } else if (encoded <= 509) {
        decoded = encoded - 512;
        decoded = encoded - 768;
    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);

static void gen_addius5(DisasContext *ctx)
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;
    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);

static void gen_andi16(DisasContext *ctx)
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);
    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
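/* LWM/SWM (and LDM/SDM on MIPS64): load or store a list of registers
   starting at base + offset via the lwm/swm helpers; not legal in a
   delay slot. */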
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
    t0 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);
    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);
    save_cpu_state(ctx, 1);
    gen_helper_lwm(cpu_env, t0, t1, t2);
    gen_helper_swm(cpu_env, t0, t1, t2);
#ifdef TARGET_MIPS64
    gen_helper_ldm(cpu_env, t0, t1, t2);
    gen_helper_sdm(cpu_env, t0, t1, t2);
    tcg_temp_free_i32(t2);
static void gen_pool16c_insn(DisasContext *ctx)
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);
    switch (((ctx->opcode) >> 4) & 0x3f) {
        gen_logic(ctx, OPC_NOR, rd, rs, 0);
        gen_logic(ctx, OPC_XOR, rd, rd, rs);
        gen_logic(ctx, OPC_AND, rd, rd, rs);
        gen_logic(ctx, OPC_OR, rd, rd, rs);
        static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);
        gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
        static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
        int offset = ZIMM(ctx->opcode, 0, 4);
        gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
        int reg = ctx->opcode & 0x1f;
        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 4);
        int reg = ctx->opcode & 0x1f;
        gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 0);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */
        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_HILO(ctx, OPC_MFHI, 0, uMIPS_RS5(ctx->opcode));
        gen_HILO(ctx, OPC_MFLO, 0, uMIPS_RS5(ctx->opcode));
        generate_exception_end(ctx, EXCP_BREAK);
        if (is_uhi(extract32(ctx->opcode, 0, 4))) {
            gen_helper_do_semihosting(cpu_env);
        /* XXX: not clear which exception should be raised
         * when in debug mode...
        check_insn(ctx, ISA_MIPS32);
        generate_exception_end(ctx, EXCP_DBp);
    case JRADDIUSP + 0:
    case JRADDIUSP + 1:
        int imm = ZIMM(ctx->opcode, 0, 5);
        gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
        gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */
        generate_exception_end(ctx, EXCP_RI);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
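/*
 * Release 6 variant of the 16-bit POOL16C pool: NOT/AND/XOR/OR, LWM/SWM,
 * MOVEP, JRC/JRCADDIUSP, JALRC16, BREAK16 and SDBBP16, selected by the
 * low bits of the opcode.
 */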
13545 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13547 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13548 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13550 switch (ctx
->opcode
& 0xf) {
13552 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13555 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13559 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13560 int offset
= extract32(ctx
->opcode
, 4, 4);
13561 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13564 case R6_JRC16
: /* JRCADDIUSP */
13565 if ((ctx
->opcode
>> 4) & 1) {
13567 int imm
= extract32(ctx
->opcode
, 5, 5);
13568 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13569 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13572 rs
= extract32(ctx
->opcode
, 5, 5);
13573 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13585 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13586 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13587 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13588 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13592 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13595 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13599 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13600 int offset
= extract32(ctx
->opcode
, 4, 4);
13601 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13604 case JALRC16
: /* BREAK16, SDBBP16 */
13605 switch (ctx
->opcode
& 0x3f) {
13607 case JALRC16
+ 0x20:
13609 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13614 generate_exception(ctx
, EXCP_BREAK
);
13618 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13619 gen_helper_do_semihosting(cpu_env
);
13621 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13622 generate_exception(ctx
, EXCP_RI
);
13624 generate_exception(ctx
, EXCP_DBp
);
13631 generate_exception(ctx
, EXCP_RI
);
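/*
 * LWXS: indexed word load, effective address base + (index << 2); the
 * loaded word is sign-extended into rd.
 */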
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
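/*
 * LWP/SWP (and the 64-bit pair forms): load or store the register pair
 * rd/rd+1 from two consecutive words (or doublewords) at base + offset.
 * Not permitted in a branch delay slot or with rd == 31.
 */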
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
    case LWP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd + 1);
        break;
    case SWP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        break;
#ifdef TARGET_MIPS64
    case LDP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd + 1);
        break;
    case SDP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
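/*
 * SYNC: translate the architectural stype field into a TCG memory
 * barrier; only the lighter-weight stypes get a reduced barrier, any
 * other value is treated as a full SYNC.
 */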
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
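/*
 * microMIPS POOL32Axf pool: conditional traps, CP0 moves, HI/LO and
 * multiply/divide(-accumulate) operations, JALR variants, TLB control,
 * DI/EI and SDBBP, selected by the minor opcode fields extracted below.
 */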
13747 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13749 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13750 int minor
= (ctx
->opcode
>> 12) & 0xf;
13751 uint32_t mips32_op
;
13753 switch (extension
) {
13755 mips32_op
= OPC_TEQ
;
13758 mips32_op
= OPC_TGE
;
13761 mips32_op
= OPC_TGEU
;
13764 mips32_op
= OPC_TLT
;
13767 mips32_op
= OPC_TLTU
;
13770 mips32_op
= OPC_TNE
;
13772 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13774 #ifndef CONFIG_USER_ONLY
13777 check_cp0_enabled(ctx
);
13779 /* Treat as NOP. */
13782 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13786 check_cp0_enabled(ctx
);
13788 TCGv t0
= tcg_temp_new();
13790 gen_load_gpr(t0
, rt
);
13791 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13797 switch (minor
& 3) {
13799 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13802 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13805 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13808 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13811 goto pool32axf_invalid
;
13815 switch (minor
& 3) {
13817 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13820 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13823 goto pool32axf_invalid
;
13829 check_insn(ctx
, ISA_MIPS32R6
);
13830 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13833 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13836 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13839 mips32_op
= OPC_CLO
;
13842 mips32_op
= OPC_CLZ
;
13844 check_insn(ctx
, ISA_MIPS32
);
13845 gen_cl(ctx
, mips32_op
, rt
, rs
);
13848 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13849 gen_rdhwr(ctx
, rt
, rs
, 0);
13852 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13856 mips32_op
= OPC_MULT
;
13859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13860 mips32_op
= OPC_MULTU
;
13863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13864 mips32_op
= OPC_DIV
;
13867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13868 mips32_op
= OPC_DIVU
;
13871 check_insn(ctx
, ISA_MIPS32
);
13872 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13875 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13876 mips32_op
= OPC_MADD
;
13879 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13880 mips32_op
= OPC_MADDU
;
13883 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13884 mips32_op
= OPC_MSUB
;
13887 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13888 mips32_op
= OPC_MSUBU
;
13890 check_insn(ctx
, ISA_MIPS32
);
13891 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13894 goto pool32axf_invalid
;
13905 generate_exception_err(ctx
, EXCP_CpU
, 2);
13908 goto pool32axf_invalid
;
13913 case JALR
: /* JALRC */
13914 case JALR_HB
: /* JALRC_HB */
13915 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13916 /* JALRC, JALRC_HB */
13917 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13919 /* JALR, JALR_HB */
13920 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13921 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13927 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13928 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13931 goto pool32axf_invalid
;
13937 check_cp0_enabled(ctx
);
13938 check_insn(ctx
, ISA_MIPS32R2
);
13939 gen_load_srsgpr(rs
, rt
);
13942 check_cp0_enabled(ctx
);
13943 check_insn(ctx
, ISA_MIPS32R2
);
13944 gen_store_srsgpr(rs
, rt
);
13947 goto pool32axf_invalid
;
13950 #ifndef CONFIG_USER_ONLY
13954 mips32_op
= OPC_TLBP
;
13957 mips32_op
= OPC_TLBR
;
13960 mips32_op
= OPC_TLBWI
;
13963 mips32_op
= OPC_TLBWR
;
13966 mips32_op
= OPC_TLBINV
;
13969 mips32_op
= OPC_TLBINVF
;
13972 mips32_op
= OPC_WAIT
;
13975 mips32_op
= OPC_DERET
;
13978 mips32_op
= OPC_ERET
;
13980 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13983 goto pool32axf_invalid
;
13989 check_cp0_enabled(ctx
);
13991 TCGv t0
= tcg_temp_new();
13993 save_cpu_state(ctx
, 1);
13994 gen_helper_di(t0
, cpu_env
);
13995 gen_store_gpr(t0
, rs
);
13996 /* Stop translation as we may have switched the execution mode */
13997 ctx
->base
.is_jmp
= DISAS_STOP
;
14002 check_cp0_enabled(ctx
);
14004 TCGv t0
= tcg_temp_new();
14006 save_cpu_state(ctx
, 1);
14007 gen_helper_ei(t0
, cpu_env
);
14008 gen_store_gpr(t0
, rs
);
            /* DISAS_STOP isn't sufficient, we need to ensure we break out
               of translated code to check for pending interrupts.  */
14011 gen_save_pc(ctx
->base
.pc_next
+ 4);
14012 ctx
->base
.is_jmp
= DISAS_EXIT
;
14017 goto pool32axf_invalid
;
14024 gen_sync(extract32(ctx
->opcode
, 16, 5));
14027 generate_exception_end(ctx
, EXCP_SYSCALL
);
14030 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
14031 gen_helper_do_semihosting(cpu_env
);
14033 check_insn(ctx
, ISA_MIPS32
);
14034 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
14035 generate_exception_end(ctx
, EXCP_RI
);
14037 generate_exception_end(ctx
, EXCP_DBp
);
14042 goto pool32axf_invalid
;
14046 switch (minor
& 3) {
14048 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
14051 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
14054 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
14057 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
14060 goto pool32axf_invalid
;
14064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14067 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
14070 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
14073 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
14076 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
14079 goto pool32axf_invalid
;
14084 MIPS_INVAL("pool32axf");
14085 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports.  */
14110 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
14112 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
14113 uint32_t mips32_op
;
14115 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
14116 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
14117 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
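    /*
     * The 'extension' selector is the 10-bit field at opcode bits 15..6.
     * The macros above rebuild that layout for the case labels: the fmt
     * (or condition) bits sit above the minor opcode, e.g.
     * FLOAT_1BIT_FMT(CFC1, 0) places the single fmt bit at bit 8.
     */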
14119 switch (extension
) {
14120 case FLOAT_1BIT_FMT(CFC1
, 0):
14121 mips32_op
= OPC_CFC1
;
14123 case FLOAT_1BIT_FMT(CTC1
, 0):
14124 mips32_op
= OPC_CTC1
;
14126 case FLOAT_1BIT_FMT(MFC1
, 0):
14127 mips32_op
= OPC_MFC1
;
14129 case FLOAT_1BIT_FMT(MTC1
, 0):
14130 mips32_op
= OPC_MTC1
;
14132 case FLOAT_1BIT_FMT(MFHC1
, 0):
14133 mips32_op
= OPC_MFHC1
;
14135 case FLOAT_1BIT_FMT(MTHC1
, 0):
14136 mips32_op
= OPC_MTHC1
;
14138 gen_cp1(ctx
, mips32_op
, rt
, rs
);
14141 /* Reciprocal square root */
14142 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
14143 mips32_op
= OPC_RSQRT_S
;
14145 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
14146 mips32_op
= OPC_RSQRT_D
;
14150 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
14151 mips32_op
= OPC_SQRT_S
;
14153 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
14154 mips32_op
= OPC_SQRT_D
;
14158 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
14159 mips32_op
= OPC_RECIP_S
;
14161 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
14162 mips32_op
= OPC_RECIP_D
;
14166 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
14167 mips32_op
= OPC_FLOOR_L_S
;
14169 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
14170 mips32_op
= OPC_FLOOR_L_D
;
14172 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
14173 mips32_op
= OPC_FLOOR_W_S
;
14175 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
14176 mips32_op
= OPC_FLOOR_W_D
;
14180 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
14181 mips32_op
= OPC_CEIL_L_S
;
14183 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
14184 mips32_op
= OPC_CEIL_L_D
;
14186 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
14187 mips32_op
= OPC_CEIL_W_S
;
14189 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
14190 mips32_op
= OPC_CEIL_W_D
;
14194 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
14195 mips32_op
= OPC_TRUNC_L_S
;
14197 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
14198 mips32_op
= OPC_TRUNC_L_D
;
14200 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
14201 mips32_op
= OPC_TRUNC_W_S
;
14203 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
14204 mips32_op
= OPC_TRUNC_W_D
;
14208 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
14209 mips32_op
= OPC_ROUND_L_S
;
14211 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
14212 mips32_op
= OPC_ROUND_L_D
;
14214 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
14215 mips32_op
= OPC_ROUND_W_S
;
14217 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
14218 mips32_op
= OPC_ROUND_W_D
;
14221 /* Integer to floating-point conversion */
14222 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
14223 mips32_op
= OPC_CVT_L_S
;
14225 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
14226 mips32_op
= OPC_CVT_L_D
;
14228 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
14229 mips32_op
= OPC_CVT_W_S
;
14231 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
14232 mips32_op
= OPC_CVT_W_D
;
14235 /* Paired-foo conversions */
14236 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
14237 mips32_op
= OPC_CVT_S_PL
;
14239 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
14240 mips32_op
= OPC_CVT_S_PU
;
14242 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
14243 mips32_op
= OPC_CVT_PW_PS
;
14245 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
14246 mips32_op
= OPC_CVT_PS_PW
;
14249 /* Floating-point moves */
14250 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
14251 mips32_op
= OPC_MOV_S
;
14253 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
14254 mips32_op
= OPC_MOV_D
;
14256 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
14257 mips32_op
= OPC_MOV_PS
;
14260 /* Absolute value */
14261 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
14262 mips32_op
= OPC_ABS_S
;
14264 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
14265 mips32_op
= OPC_ABS_D
;
14267 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
14268 mips32_op
= OPC_ABS_PS
;
14272 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
14273 mips32_op
= OPC_NEG_S
;
14275 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
14276 mips32_op
= OPC_NEG_D
;
14278 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
14279 mips32_op
= OPC_NEG_PS
;
14282 /* Reciprocal square root step */
14283 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
14284 mips32_op
= OPC_RSQRT1_S
;
14286 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
14287 mips32_op
= OPC_RSQRT1_D
;
14289 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
14290 mips32_op
= OPC_RSQRT1_PS
;
14293 /* Reciprocal step */
14294 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
14295 mips32_op
= OPC_RECIP1_S
;
14297 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
        mips32_op = OPC_RECIP1_D;
14300 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
14301 mips32_op
= OPC_RECIP1_PS
;
14304 /* Conversions from double */
14305 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
14306 mips32_op
= OPC_CVT_D_S
;
14308 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
14309 mips32_op
= OPC_CVT_D_W
;
14311 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
14312 mips32_op
= OPC_CVT_D_L
;
14315 /* Conversions from single */
14316 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
14317 mips32_op
= OPC_CVT_S_D
;
14319 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
14320 mips32_op
= OPC_CVT_S_W
;
14322 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
14323 mips32_op
= OPC_CVT_S_L
;
14325 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
14328 /* Conditional moves on floating-point codes */
14329 case COND_FLOAT_MOV(MOVT
, 0):
14330 case COND_FLOAT_MOV(MOVT
, 1):
14331 case COND_FLOAT_MOV(MOVT
, 2):
14332 case COND_FLOAT_MOV(MOVT
, 3):
14333 case COND_FLOAT_MOV(MOVT
, 4):
14334 case COND_FLOAT_MOV(MOVT
, 5):
14335 case COND_FLOAT_MOV(MOVT
, 6):
14336 case COND_FLOAT_MOV(MOVT
, 7):
14337 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14338 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
14340 case COND_FLOAT_MOV(MOVF
, 0):
14341 case COND_FLOAT_MOV(MOVF
, 1):
14342 case COND_FLOAT_MOV(MOVF
, 2):
14343 case COND_FLOAT_MOV(MOVF
, 3):
14344 case COND_FLOAT_MOV(MOVF
, 4):
14345 case COND_FLOAT_MOV(MOVF
, 5):
14346 case COND_FLOAT_MOV(MOVF
, 6):
14347 case COND_FLOAT_MOV(MOVF
, 7):
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
14352 MIPS_INVAL("pool32fxf");
14353 generate_exception_end(ctx
, EXCP_RI
);
14358 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
14362 int rt
, rs
, rd
, rr
;
14364 uint32_t op
, minor
, minor2
, mips32_op
;
14365 uint32_t cond
, fmt
, cc
;
14367 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
14368 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
14370 rt
= (ctx
->opcode
>> 21) & 0x1f;
14371 rs
= (ctx
->opcode
>> 16) & 0x1f;
14372 rd
= (ctx
->opcode
>> 11) & 0x1f;
14373 rr
= (ctx
->opcode
>> 6) & 0x1f;
14374 imm
= (int16_t) ctx
->opcode
;
14376 op
= (ctx
->opcode
>> 26) & 0x3f;
14379 minor
= ctx
->opcode
& 0x3f;
14382 minor
= (ctx
->opcode
>> 6) & 0xf;
14385 mips32_op
= OPC_SLL
;
14388 mips32_op
= OPC_SRA
;
14391 mips32_op
= OPC_SRL
;
14394 mips32_op
= OPC_ROTR
;
14396 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
14399 check_insn(ctx
, ISA_MIPS32R6
);
14400 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
14403 check_insn(ctx
, ISA_MIPS32R6
);
14404 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
14407 check_insn(ctx
, ISA_MIPS32R6
);
14408 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
14411 goto pool32a_invalid
;
14415 minor
= (ctx
->opcode
>> 6) & 0xf;
14419 mips32_op
= OPC_ADD
;
14422 mips32_op
= OPC_ADDU
;
14425 mips32_op
= OPC_SUB
;
14428 mips32_op
= OPC_SUBU
;
14431 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14432 mips32_op
= OPC_MUL
;
14434 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14438 mips32_op
= OPC_SLLV
;
14441 mips32_op
= OPC_SRLV
;
14444 mips32_op
= OPC_SRAV
;
14447 mips32_op
= OPC_ROTRV
;
14449 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14451 /* Logical operations */
14453 mips32_op
= OPC_AND
;
14456 mips32_op
= OPC_OR
;
14459 mips32_op
= OPC_NOR
;
14462 mips32_op
= OPC_XOR
;
14464 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14466 /* Set less than */
14468 mips32_op
= OPC_SLT
;
14471 mips32_op
= OPC_SLTU
;
14473 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14476 goto pool32a_invalid
;
14480 minor
= (ctx
->opcode
>> 6) & 0xf;
14482 /* Conditional moves */
14483 case MOVN
: /* MUL */
14484 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14486 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14489 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14492 case MOVZ
: /* MUH */
14493 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14495 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14498 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14502 check_insn(ctx
, ISA_MIPS32R6
);
14503 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14506 check_insn(ctx
, ISA_MIPS32R6
);
14507 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14509 case LWXS
: /* DIV */
14510 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14512 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14515 gen_ldxs(ctx
, rs
, rt
, rd
);
14519 check_insn(ctx
, ISA_MIPS32R6
);
14520 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14523 check_insn(ctx
, ISA_MIPS32R6
);
14524 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14527 check_insn(ctx
, ISA_MIPS32R6
);
14528 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14531 goto pool32a_invalid
;
14535 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14538 check_insn(ctx
, ISA_MIPS32R6
);
14539 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14540 extract32(ctx
->opcode
, 9, 2));
14543 check_insn(ctx
, ISA_MIPS32R6
);
14544 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
14547 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14550 gen_pool32axf(env
, ctx
, rt
, rs
);
14553 generate_exception_end(ctx
, EXCP_BREAK
);
14556 check_insn(ctx
, ISA_MIPS32R6
);
14557 generate_exception_end(ctx
, EXCP_RI
);
14561 MIPS_INVAL("pool32a");
14562 generate_exception_end(ctx
, EXCP_RI
);
14567 minor
= (ctx
->opcode
>> 12) & 0xf;
14570 check_cp0_enabled(ctx
);
14571 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14572 gen_cache_operation(ctx
, rt
, rs
, imm
);
14577 /* COP2: Not implemented. */
14578 generate_exception_err(ctx
, EXCP_CpU
, 2);
14580 #ifdef TARGET_MIPS64
14583 check_insn(ctx
, ISA_MIPS3
);
14584 check_mips_64(ctx
);
14589 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14591 #ifdef TARGET_MIPS64
14594 check_insn(ctx
, ISA_MIPS3
);
14595 check_mips_64(ctx
);
14600 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14603 MIPS_INVAL("pool32b");
14604 generate_exception_end(ctx
, EXCP_RI
);
14609 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14610 minor
= ctx
->opcode
& 0x3f;
14611 check_cp1_enabled(ctx
);
14614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14615 mips32_op
= OPC_ALNV_PS
;
14618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14619 mips32_op
= OPC_MADD_S
;
14622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14623 mips32_op
= OPC_MADD_D
;
14626 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14627 mips32_op
= OPC_MADD_PS
;
14630 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14631 mips32_op
= OPC_MSUB_S
;
14634 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14635 mips32_op
= OPC_MSUB_D
;
14638 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14639 mips32_op
= OPC_MSUB_PS
;
14642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14643 mips32_op
= OPC_NMADD_S
;
14646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14647 mips32_op
= OPC_NMADD_D
;
14650 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14651 mips32_op
= OPC_NMADD_PS
;
14654 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14655 mips32_op
= OPC_NMSUB_S
;
14658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14659 mips32_op
= OPC_NMSUB_D
;
14662 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14663 mips32_op
= OPC_NMSUB_PS
;
14665 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14667 case CABS_COND_FMT
:
14668 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14669 cond
= (ctx
->opcode
>> 6) & 0xf;
14670 cc
= (ctx
->opcode
>> 13) & 0x7;
14671 fmt
= (ctx
->opcode
>> 10) & 0x3;
14674 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14677 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14680 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14683 goto pool32f_invalid
;
14687 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14688 cond
= (ctx
->opcode
>> 6) & 0xf;
14689 cc
= (ctx
->opcode
>> 13) & 0x7;
14690 fmt
= (ctx
->opcode
>> 10) & 0x3;
14693 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14696 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14699 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14702 goto pool32f_invalid
;
14706 check_insn(ctx
, ISA_MIPS32R6
);
14707 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14710 check_insn(ctx
, ISA_MIPS32R6
);
14711 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14714 gen_pool32fxf(ctx
, rt
, rs
);
14718 switch ((ctx
->opcode
>> 6) & 0x7) {
14720 mips32_op
= OPC_PLL_PS
;
14723 mips32_op
= OPC_PLU_PS
;
14726 mips32_op
= OPC_PUL_PS
;
14729 mips32_op
= OPC_PUU_PS
;
14732 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14733 mips32_op
= OPC_CVT_PS_S
;
14735 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14738 goto pool32f_invalid
;
14742 check_insn(ctx
, ISA_MIPS32R6
);
14743 switch ((ctx
->opcode
>> 9) & 0x3) {
14745 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14748 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14751 goto pool32f_invalid
;
14756 switch ((ctx
->opcode
>> 6) & 0x7) {
14758 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14759 mips32_op
= OPC_LWXC1
;
14762 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14763 mips32_op
= OPC_SWXC1
;
14766 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14767 mips32_op
= OPC_LDXC1
;
14770 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14771 mips32_op
= OPC_SDXC1
;
14774 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14775 mips32_op
= OPC_LUXC1
;
14778 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14779 mips32_op
= OPC_SUXC1
;
14781 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14784 goto pool32f_invalid
;
14788 check_insn(ctx
, ISA_MIPS32R6
);
14789 switch ((ctx
->opcode
>> 9) & 0x3) {
14791 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14794 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14797 goto pool32f_invalid
;
14802 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14803 fmt
= (ctx
->opcode
>> 9) & 0x3;
14804 switch ((ctx
->opcode
>> 6) & 0x7) {
14808 mips32_op
= OPC_RSQRT2_S
;
14811 mips32_op
= OPC_RSQRT2_D
;
14814 mips32_op
= OPC_RSQRT2_PS
;
14817 goto pool32f_invalid
;
14823 mips32_op
= OPC_RECIP2_S
;
14826 mips32_op
= OPC_RECIP2_D
;
14829 mips32_op
= OPC_RECIP2_PS
;
14832 goto pool32f_invalid
;
14836 mips32_op
= OPC_ADDR_PS
;
14839 mips32_op
= OPC_MULR_PS
;
14841 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14844 goto pool32f_invalid
;
14848 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14849 cc
= (ctx
->opcode
>> 13) & 0x7;
14850 fmt
= (ctx
->opcode
>> 9) & 0x3;
14851 switch ((ctx
->opcode
>> 6) & 0x7) {
14852 case MOVF_FMT
: /* RINT_FMT */
14853 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14857 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14860 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14863 goto pool32f_invalid
;
14869 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14872 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14876 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14879 goto pool32f_invalid
;
14883 case MOVT_FMT
: /* CLASS_FMT */
14884 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14888 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14891 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14894 goto pool32f_invalid
;
14900 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14903 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14907 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14910 goto pool32f_invalid
;
14915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14918 goto pool32f_invalid
;
14921 #define FINSN_3ARG_SDPS(prfx) \
14922 switch ((ctx->opcode >> 8) & 0x3) { \
14924 mips32_op = OPC_##prfx##_S; \
14927 mips32_op = OPC_##prfx##_D; \
14929 case FMT_SDPS_PS: \
14931 mips32_op = OPC_##prfx##_PS; \
14934 goto pool32f_invalid; \
14937 check_insn(ctx
, ISA_MIPS32R6
);
14938 switch ((ctx
->opcode
>> 9) & 0x3) {
14940 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14943 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14946 goto pool32f_invalid
;
14950 check_insn(ctx
, ISA_MIPS32R6
);
14951 switch ((ctx
->opcode
>> 9) & 0x3) {
14953 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14956 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14959 goto pool32f_invalid
;
14963 /* regular FP ops */
14964 switch ((ctx
->opcode
>> 6) & 0x3) {
14966 FINSN_3ARG_SDPS(ADD
);
14969 FINSN_3ARG_SDPS(SUB
);
14972 FINSN_3ARG_SDPS(MUL
);
14975 fmt
= (ctx
->opcode
>> 8) & 0x3;
14977 mips32_op
= OPC_DIV_D
;
14978 } else if (fmt
== 0) {
14979 mips32_op
= OPC_DIV_S
;
14981 goto pool32f_invalid
;
14985 goto pool32f_invalid
;
14990 switch ((ctx
->opcode
>> 6) & 0x7) {
14991 case MOVN_FMT
: /* SELNEZ_FMT */
14992 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14994 switch ((ctx
->opcode
>> 9) & 0x3) {
14996 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14999 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
15002 goto pool32f_invalid
;
15006 FINSN_3ARG_SDPS(MOVN
);
15010 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15011 FINSN_3ARG_SDPS(MOVN
);
15013 case MOVZ_FMT
: /* SELEQZ_FMT */
15014 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15016 switch ((ctx
->opcode
>> 9) & 0x3) {
15018 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
15021 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
15024 goto pool32f_invalid
;
15028 FINSN_3ARG_SDPS(MOVZ
);
15032 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15033 FINSN_3ARG_SDPS(MOVZ
);
15036 check_insn(ctx
, ISA_MIPS32R6
);
15037 switch ((ctx
->opcode
>> 9) & 0x3) {
15039 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
15042 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
15045 goto pool32f_invalid
;
15049 check_insn(ctx
, ISA_MIPS32R6
);
15050 switch ((ctx
->opcode
>> 9) & 0x3) {
15052 mips32_op
= OPC_MADDF_S
;
15055 mips32_op
= OPC_MADDF_D
;
15058 goto pool32f_invalid
;
15062 check_insn(ctx
, ISA_MIPS32R6
);
15063 switch ((ctx
->opcode
>> 9) & 0x3) {
15065 mips32_op
= OPC_MSUBF_S
;
15068 mips32_op
= OPC_MSUBF_D
;
15071 goto pool32f_invalid
;
15075 goto pool32f_invalid
;
15079 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
15083 MIPS_INVAL("pool32f");
15084 generate_exception_end(ctx
, EXCP_RI
);
15088 generate_exception_err(ctx
, EXCP_CpU
, 1);
15092 minor
= (ctx
->opcode
>> 21) & 0x1f;
15095 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15096 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
15099 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15100 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
15101 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15104 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15105 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
15106 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15109 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15110 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
15113 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15114 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
15115 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15118 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15119 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
15120 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15123 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15124 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
15127 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15128 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
15132 case TLTI
: /* BC1EQZC */
15133 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15135 check_cp1_enabled(ctx
);
15136 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
15139 mips32_op
= OPC_TLTI
;
15143 case TGEI
: /* BC1NEZC */
15144 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15146 check_cp1_enabled(ctx
);
15147 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
15150 mips32_op
= OPC_TGEI
;
15155 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15156 mips32_op
= OPC_TLTIU
;
15159 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15160 mips32_op
= OPC_TGEIU
;
15162 case TNEI
: /* SYNCI */
15163 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* Break the TB to be able to sync copied instructions */
15167 ctx
->base
.is_jmp
= DISAS_STOP
;
15170 mips32_op
= OPC_TNEI
;
15175 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15176 mips32_op
= OPC_TEQI
;
15178 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
15183 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15184 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
15185 4, rs
, 0, imm
<< 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
15191 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15192 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
15195 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions */
15198 ctx
->base
.is_jmp
= DISAS_STOP
;
15202 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15203 /* COP2: Not implemented. */
15204 generate_exception_err(ctx
, EXCP_CpU
, 2);
15207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15208 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
15211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15212 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
15215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15216 mips32_op
= OPC_BC1FANY4
;
15219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15220 mips32_op
= OPC_BC1TANY4
;
15223 check_insn(ctx
, ASE_MIPS3D
);
15226 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
15227 check_cp1_enabled(ctx
);
15228 gen_compute_branch1(ctx
, mips32_op
,
15229 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
15231 generate_exception_err(ctx
, EXCP_CpU
, 1);
15236 /* MIPS DSP: not implemented */
15239 MIPS_INVAL("pool32i");
15240 generate_exception_end(ctx
, EXCP_RI
);
15245 minor
= (ctx
->opcode
>> 12) & 0xf;
15246 offset
= sextract32(ctx
->opcode
, 0,
15247 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
15250 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15251 mips32_op
= OPC_LWL
;
15254 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15255 mips32_op
= OPC_SWL
;
15258 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15259 mips32_op
= OPC_LWR
;
15262 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15263 mips32_op
= OPC_SWR
;
15265 #if defined(TARGET_MIPS64)
15267 check_insn(ctx
, ISA_MIPS3
);
15268 check_mips_64(ctx
);
15269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15270 mips32_op
= OPC_LDL
;
15273 check_insn(ctx
, ISA_MIPS3
);
15274 check_mips_64(ctx
);
15275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15276 mips32_op
= OPC_SDL
;
15279 check_insn(ctx
, ISA_MIPS3
);
15280 check_mips_64(ctx
);
15281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15282 mips32_op
= OPC_LDR
;
15285 check_insn(ctx
, ISA_MIPS3
);
15286 check_mips_64(ctx
);
15287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15288 mips32_op
= OPC_SDR
;
15291 check_insn(ctx
, ISA_MIPS3
);
15292 check_mips_64(ctx
);
15293 mips32_op
= OPC_LWU
;
15296 check_insn(ctx
, ISA_MIPS3
);
15297 check_mips_64(ctx
);
15298 mips32_op
= OPC_LLD
;
15302 mips32_op
= OPC_LL
;
15305 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
15308 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
15311 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
15313 #if defined(TARGET_MIPS64)
15315 check_insn(ctx
, ISA_MIPS3
);
15316 check_mips_64(ctx
);
15317 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
15322 MIPS_INVAL("pool32c ld-eva");
15323 generate_exception_end(ctx
, EXCP_RI
);
15326 check_cp0_enabled(ctx
);
15328 minor2
= (ctx
->opcode
>> 9) & 0x7;
15329 offset
= sextract32(ctx
->opcode
, 0, 9);
15332 mips32_op
= OPC_LBUE
;
15335 mips32_op
= OPC_LHUE
;
15338 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15339 mips32_op
= OPC_LWLE
;
15342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15343 mips32_op
= OPC_LWRE
;
15346 mips32_op
= OPC_LBE
;
15349 mips32_op
= OPC_LHE
;
15352 mips32_op
= OPC_LLE
;
15355 mips32_op
= OPC_LWE
;
15361 MIPS_INVAL("pool32c st-eva");
15362 generate_exception_end(ctx
, EXCP_RI
);
15365 check_cp0_enabled(ctx
);
15367 minor2
= (ctx
->opcode
>> 9) & 0x7;
15368 offset
= sextract32(ctx
->opcode
, 0, 9);
15371 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15372 mips32_op
= OPC_SWLE
;
15375 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15376 mips32_op
= OPC_SWRE
;
15379 /* Treat as no-op */
15380 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15381 /* hint codes 24-31 are reserved and signal RI */
15382 generate_exception(ctx
, EXCP_RI
);
15386 /* Treat as no-op */
15387 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15388 gen_cache_operation(ctx
, rt
, rs
, offset
);
15392 mips32_op
= OPC_SBE
;
15395 mips32_op
= OPC_SHE
;
15398 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
15401 mips32_op
= OPC_SWE
;
15406 /* Treat as no-op */
15407 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
15408 /* hint codes 24-31 are reserved and signal RI */
15409 generate_exception(ctx
, EXCP_RI
);
15413 MIPS_INVAL("pool32c");
15414 generate_exception_end(ctx
, EXCP_RI
);
15418 case ADDI32
: /* AUI, LUI */
15419 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15421 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15424 mips32_op
= OPC_ADDI
;
15429 mips32_op
= OPC_ADDIU
;
15431 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15434 /* Logical operations */
15436 mips32_op
= OPC_ORI
;
15439 mips32_op
= OPC_XORI
;
15442 mips32_op
= OPC_ANDI
;
15444 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15447 /* Set less than immediate */
15449 mips32_op
= OPC_SLTI
;
15452 mips32_op
= OPC_SLTIU
;
15454 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15457 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15458 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15459 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15460 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15462 case JALS32
: /* BOVC, BEQC, BEQZALC */
15463 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15466 mips32_op
= OPC_BOVC
;
15467 } else if (rs
< rt
&& rs
== 0) {
15469 mips32_op
= OPC_BEQZALC
;
15472 mips32_op
= OPC_BEQC
;
15474 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15477 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15478 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15479 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15482 case BEQ32
: /* BC */
15483 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15485 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15486 sextract32(ctx
->opcode
<< 1, 0, 27));
15489 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15492 case BNE32
: /* BALC */
15493 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15495 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15496 sextract32(ctx
->opcode
<< 1, 0, 27));
15499 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15502 case J32
: /* BGTZC, BLTZC, BLTC */
15503 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15504 if (rs
== 0 && rt
!= 0) {
15506 mips32_op
= OPC_BGTZC
;
15507 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15509 mips32_op
= OPC_BLTZC
;
15512 mips32_op
= OPC_BLTC
;
15514 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15517 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15518 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15521 case JAL32
: /* BLEZC, BGEZC, BGEC */
15522 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15523 if (rs
== 0 && rt
!= 0) {
15525 mips32_op
= OPC_BLEZC
;
15526 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15528 mips32_op
= OPC_BGEZC
;
15531 mips32_op
= OPC_BGEC
;
15533 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15536 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15537 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15538 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15541 /* Floating point (COP1) */
15543 mips32_op
= OPC_LWC1
;
15546 mips32_op
= OPC_LDC1
;
15549 mips32_op
= OPC_SWC1
;
15552 mips32_op
= OPC_SDC1
;
15554 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15556 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15557 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15558 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15559 switch ((ctx
->opcode
>> 16) & 0x1f) {
15568 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15571 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
15574 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
15584 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15587 generate_exception(ctx
, EXCP_RI
);
15592 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15593 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15595 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15598 case BNVC
: /* BNEC, BNEZALC */
15599 check_insn(ctx
, ISA_MIPS32R6
);
15602 mips32_op
= OPC_BNVC
;
15603 } else if (rs
< rt
&& rs
== 0) {
15605 mips32_op
= OPC_BNEZALC
;
15608 mips32_op
= OPC_BNEC
;
15610 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15612 case R6_BNEZC
: /* JIALC */
15613 check_insn(ctx
, ISA_MIPS32R6
);
15616 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15617 sextract32(ctx
->opcode
<< 1, 0, 22));
15620 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15623 case R6_BEQZC
: /* JIC */
15624 check_insn(ctx
, ISA_MIPS32R6
);
15627 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15628 sextract32(ctx
->opcode
<< 1, 0, 22));
15631 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15634 case BLEZALC
: /* BGEZALC, BGEUC */
15635 check_insn(ctx
, ISA_MIPS32R6
);
15636 if (rs
== 0 && rt
!= 0) {
15638 mips32_op
= OPC_BLEZALC
;
15639 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15641 mips32_op
= OPC_BGEZALC
;
15644 mips32_op
= OPC_BGEUC
;
15646 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15648 case BGTZALC
: /* BLTZALC, BLTUC */
15649 check_insn(ctx
, ISA_MIPS32R6
);
15650 if (rs
== 0 && rt
!= 0) {
15652 mips32_op
= OPC_BGTZALC
;
15653 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15655 mips32_op
= OPC_BLTZALC
;
15658 mips32_op
= OPC_BLTUC
;
15660 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15662 /* Loads and stores */
15664 mips32_op
= OPC_LB
;
15667 mips32_op
= OPC_LBU
;
15670 mips32_op
= OPC_LH
;
15673 mips32_op
= OPC_LHU
;
15676 mips32_op
= OPC_LW
;
15678 #ifdef TARGET_MIPS64
15680 check_insn(ctx
, ISA_MIPS3
);
15681 check_mips_64(ctx
);
15682 mips32_op
= OPC_LD
;
15685 check_insn(ctx
, ISA_MIPS3
);
15686 check_mips_64(ctx
);
15687 mips32_op
= OPC_SD
;
15691 mips32_op
= OPC_SB
;
15694 mips32_op
= OPC_SH
;
15697 mips32_op
= OPC_SW
;
15700 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15703 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15706 generate_exception_end(ctx
, EXCP_RI
);
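/*
 * Top-level microMIPS decoder: check halfword alignment and the
 * delay-slot size restrictions, handle the 16-bit encodings directly and
 * fall through to decode_micromips32_opc() for 32-bit encodings.
 */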
15711 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15715 /* make sure instructions are on a halfword boundary */
15716 if (ctx
->base
.pc_next
& 0x1) {
15717 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15718 generate_exception_end(ctx
, EXCP_AdEL
);
15722 op
= (ctx
->opcode
>> 10) & 0x3f;
15723 /* Enforce properly-sized instructions in a delay slot */
15724 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15725 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15727 /* POOL32A, POOL32B, POOL32I, POOL32C */
15729 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15731 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15733 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15735 /* LB32, LH32, LWC132, LDC132, LW32 */
15736 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15737 generate_exception_end(ctx
, EXCP_RI
);
15742 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15744 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15746 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15747 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15748 generate_exception_end(ctx
, EXCP_RI
);
15758 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15759 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15760 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15763 switch (ctx
->opcode
& 0x1) {
15771 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* In the Release 6 the register number location in
                 * the instruction encoding has changed.
                 */
15775 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15777 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15783 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15784 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15785 int amount
= (ctx
->opcode
>> 1) & 0x7;
15787 amount
= amount
== 0 ? 8 : amount
;
15789 switch (ctx
->opcode
& 0x1) {
15798 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15802 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15803 gen_pool16c_r6_insn(ctx
);
15805 gen_pool16c_insn(ctx
);
15810 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15811 int rb
= 28; /* GP */
15812 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15814 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15818 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15819 if (ctx
->opcode
& 1) {
15820 generate_exception_end(ctx
, EXCP_RI
);
15823 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15824 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15825 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15826 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15831 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15832 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15833 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15834 offset
= (offset
== 0xf ? -1 : offset
);
15836 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15841 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15842 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15843 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15845 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15850 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15851 int rb
= 29; /* SP */
15852 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15854 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15859 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15860 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15861 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15863 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15868 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15869 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15870 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15872 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15877 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15878 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15879 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15881 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15886 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15887 int rb
= 29; /* SP */
15888 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15890 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15895 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15896 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15897 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15899 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15904 int rd
= uMIPS_RD5(ctx
->opcode
);
15905 int rs
= uMIPS_RS5(ctx
->opcode
);
15907 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15914 switch (ctx
->opcode
& 0x1) {
15924 switch (ctx
->opcode
& 0x1) {
15929 gen_addiur1sp(ctx
);
15933 case B16
: /* BC16 */
15934 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15935 sextract32(ctx
->opcode
, 0, 10) << 1,
15936 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15938 case BNEZ16
: /* BNEZC16 */
15939 case BEQZ16
: /* BEQZC16 */
15940 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15941 mmreg(uMIPS_RD(ctx
->opcode
)),
15942 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15943 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15948 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15949 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15951 imm
= (imm
== 0x7f ? -1 : imm
);
15952 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15958 generate_exception_end(ctx
, EXCP_RI
);
15961 decode_micromips32_opc(env
, ctx
);
15974 /* MAJOR, P16, and P32 pools opcodes */
15978 NM_MOVE_BALC
= 0x02,
15986 NM_P16_SHIFT
= 0x0c,
16004 NM_P_LS_U12
= 0x21,
16014 NM_P16_ADDU
= 0x2c,
16028 NM_MOVEPREV
= 0x3f,
16031 /* POOL32A instruction pool */
16033 NM_POOL32A0
= 0x00,
16034 NM_SPECIAL2
= 0x01,
16037 NM_POOL32A5
= 0x05,
16038 NM_POOL32A7
= 0x07,
16041 /* P.GP.W instruction pool */
16043 NM_ADDIUGP_W
= 0x00,
16048 /* P48I instruction pool */
16052 NM_ADDIUGP48
= 0x02,
16053 NM_ADDIUPC48
= 0x03,
16058 /* P.U12 instruction pool */
16067 NM_ADDIUNEG
= 0x08,
16074 /* POOL32F instruction pool */
16076 NM_POOL32F_0
= 0x00,
16077 NM_POOL32F_3
= 0x03,
16078 NM_POOL32F_5
= 0x05,
16081 /* POOL32S instruction pool */
16083 NM_POOL32S_0
= 0x00,
16084 NM_POOL32S_4
= 0x04,
16087 /* P.LUI instruction pool */
16093 /* P.GP.BH instruction pool */
16098 NM_ADDIUGP_B
= 0x03,
16101 NM_P_GP_CP1
= 0x06,
16104 /* P.LS.U12 instruction pool */
16109 NM_P_PREFU12
= 0x03,
16122 /* P.LS.S9 instruction pool */
16128 NM_P_LS_UAWM
= 0x05,
16131 /* P.BAL instruction pool */
16137 /* P.J instruction pool */
16140 NM_JALRC_HB
= 0x01,
16141 NM_P_BALRSC
= 0x08,
16144 /* P.BR1 instruction pool */
16152 /* P.BR2 instruction pool */
16159 /* P.BRI instruction pool */
16171 /* P16.SHIFT instruction pool */
16177 /* POOL16C instruction pool */
16179 NM_POOL16C_0
= 0x00,
16183 /* P16.A1 instruction pool */
16185 NM_ADDIUR1SP
= 0x01,
16188 /* P16.A2 instruction pool */
16191 NM_P_ADDIURS5
= 0x01,
16194 /* P16.ADDU instruction pool */
16200 /* P16.SR instruction pool */
16203 NM_RESTORE_JRC16
= 0x01,
16206 /* P16.4X4 instruction pool */
16212 /* P16.LB instruction pool */
16219 /* P16.LH instruction pool */
16226 /* P.RI instruction pool */
16229 NM_P_SYSCALL
= 0x01,
16234 /* POOL32A0 instruction pool */
16269 NM_D_E_MT_VPE
= 0x56,
16277 /* POOL32A5 instruction pool */
16279 NM_CMP_EQ_PH
= 0x00,
16280 NM_CMP_LT_PH
= 0x08,
16281 NM_CMP_LE_PH
= 0x10,
16282 NM_CMPGU_EQ_QB
= 0x18,
16283 NM_CMPGU_LT_QB
= 0x20,
16284 NM_CMPGU_LE_QB
= 0x28,
16285 NM_CMPGDU_EQ_QB
= 0x30,
16286 NM_CMPGDU_LT_QB
= 0x38,
16287 NM_CMPGDU_LE_QB
= 0x40,
16288 NM_CMPU_EQ_QB
= 0x48,
16289 NM_CMPU_LT_QB
= 0x50,
16290 NM_CMPU_LE_QB
= 0x58,
16291 NM_ADDQ_S_W
= 0x60,
16292 NM_SUBQ_S_W
= 0x68,
16296 NM_ADDQ_S_PH
= 0x01,
16297 NM_ADDQH_R_PH
= 0x09,
16298 NM_ADDQH_R_W
= 0x11,
16299 NM_ADDU_S_QB
= 0x19,
16300 NM_ADDU_S_PH
= 0x21,
16301 NM_ADDUH_R_QB
= 0x29,
16302 NM_SHRAV_R_PH
= 0x31,
16303 NM_SHRAV_R_QB
= 0x39,
16304 NM_SUBQ_S_PH
= 0x41,
16305 NM_SUBQH_R_PH
= 0x49,
16306 NM_SUBQH_R_W
= 0x51,
16307 NM_SUBU_S_QB
= 0x59,
16308 NM_SUBU_S_PH
= 0x61,
16309 NM_SUBUH_R_QB
= 0x69,
16310 NM_SHLLV_S_PH
= 0x71,
16311 NM_PRECR_SRA_R_PH_W
= 0x79,
16313 NM_MULEU_S_PH_QBL
= 0x12,
16314 NM_MULEU_S_PH_QBR
= 0x1a,
16315 NM_MULQ_RS_PH
= 0x22,
16316 NM_MULQ_S_PH
= 0x2a,
16317 NM_MULQ_RS_W
= 0x32,
16318 NM_MULQ_S_W
= 0x3a,
16321 NM_SHRAV_R_W
= 0x5a,
16322 NM_SHRLV_PH
= 0x62,
16323 NM_SHRLV_QB
= 0x6a,
16324 NM_SHLLV_QB
= 0x72,
16325 NM_SHLLV_S_W
= 0x7a,
16329 NM_MULEQ_S_W_PHL
= 0x04,
16330 NM_MULEQ_S_W_PHR
= 0x0c,
16332 NM_MUL_S_PH
= 0x05,
16333 NM_PRECR_QB_PH
= 0x0d,
16334 NM_PRECRQ_QB_PH
= 0x15,
16335 NM_PRECRQ_PH_W
= 0x1d,
16336 NM_PRECRQ_RS_PH_W
= 0x25,
16337 NM_PRECRQU_S_QB_PH
= 0x2d,
16338 NM_PACKRL_PH
= 0x35,
16342 NM_SHRA_R_W
= 0x5e,
16343 NM_SHRA_R_PH
= 0x66,
16344 NM_SHLL_S_PH
= 0x76,
16345 NM_SHLL_S_W
= 0x7e,
16350 /* POOL32A7 instruction pool */
16355 NM_POOL32AXF
= 0x07,
16358 /* P.SR instruction pool */
16364 /* P.SHIFT instruction pool */
16372 /* P.ROTX instruction pool */
16377 /* P.INS instruction pool */
16382 /* P.EXT instruction pool */
16387 /* POOL32F_0 (fmt) instruction pool */
16392 NM_SELEQZ_S
= 0x07,
16393 NM_SELEQZ_D
= 0x47,
16397 NM_SELNEZ_S
= 0x0f,
16398 NM_SELNEZ_D
= 0x4f,
16413 /* POOL32F_3 instruction pool */
16417 NM_MINA_FMT
= 0x04,
16418 NM_MAXA_FMT
= 0x05,
16419 NM_POOL32FXF
= 0x07,
16422 /* POOL32F_5 instruction pool */
16424 NM_CMP_CONDN_S
= 0x00,
16425 NM_CMP_CONDN_D
= 0x02,
16428 /* P.GP.LH instruction pool */
16434 /* P.GP.SH instruction pool */
16439 /* P.GP.CP1 instruction pool */
16447 /* P.LS.S0 instruction pool */
16464 NM_P_PREFS9
= 0x03,
16470 /* P.LS.S1 instruction pool */
16472 NM_ASET_ACLR
= 0x02,
16480 /* P.LS.WM instruction pool */
16486 /* P.LS.UAWM instruction pool */
16492 /* P.BR3A instruction pool */
16498 NM_BPOSGE32C
= 0x04,
16501 /* P16.RI instruction pool */
16503 NM_P16_SYSCALL
= 0x01,
16508 /* POOL16C_0 instruction pool */
16510 NM_POOL16C_00
= 0x00,
16513 /* P16.JRC instruction pool */
16519 /* P.SYSCALL instruction pool */
16525 /* P.TRAP instruction pool */
16531 /* P.CMOVE instruction pool */
16537 /* POOL32Axf instruction pool */
16539 NM_POOL32AXF_1
= 0x01,
16540 NM_POOL32AXF_2
= 0x02,
16541 NM_POOL32AXF_4
= 0x04,
16542 NM_POOL32AXF_5
= 0x05,
16543 NM_POOL32AXF_7
= 0x07,
16546 /* POOL32Axf_1 instruction pool */
16548 NM_POOL32AXF_1_0
= 0x00,
16549 NM_POOL32AXF_1_1
= 0x01,
16550 NM_POOL32AXF_1_3
= 0x03,
16551 NM_POOL32AXF_1_4
= 0x04,
16552 NM_POOL32AXF_1_5
= 0x05,
16553 NM_POOL32AXF_1_7
= 0x07,
16556 /* POOL32Axf_2 instruction pool */
16558 NM_POOL32AXF_2_0_7
= 0x00,
16559 NM_POOL32AXF_2_8_15
= 0x01,
16560 NM_POOL32AXF_2_16_23
= 0x02,
16561 NM_POOL32AXF_2_24_31
= 0x03,
16564 /* POOL32Axf_7 instruction pool */
16566 NM_SHRA_R_QB
= 0x0,
16571 /* POOL32Axf_1_0 instruction pool */
16579 /* POOL32Axf_1_1 instruction pool */
16585 /* POOL32Axf_1_3 instruction pool */
16593 /* POOL32Axf_1_4 instruction pool */
16599 /* POOL32Axf_1_5 instruction pool */
16601 NM_MAQ_S_W_PHR
= 0x0,
16602 NM_MAQ_S_W_PHL
= 0x1,
16603 NM_MAQ_SA_W_PHR
= 0x2,
16604 NM_MAQ_SA_W_PHL
= 0x3,
16607 /* POOL32Axf_1_7 instruction pool */
16611 NM_EXTR_RS_W
= 0x2,
16615 /* POOL32Axf_2_0_7 instruction pool */
16618 NM_DPAQ_S_W_PH
= 0x1,
16620 NM_DPSQ_S_W_PH
= 0x3,
16627 /* POOL32Axf_2_8_15 instruction pool */
16629 NM_DPAX_W_PH
= 0x0,
16630 NM_DPAQ_SA_L_W
= 0x1,
16631 NM_DPSX_W_PH
= 0x2,
16632 NM_DPSQ_SA_L_W
= 0x3,
16635 NM_EXTRV_R_W
= 0x7,
16638 /* POOL32Axf_2_16_23 instruction pool */
16640 NM_DPAU_H_QBL
= 0x0,
16641 NM_DPAQX_S_W_PH
= 0x1,
16642 NM_DPSU_H_QBL
= 0x2,
16643 NM_DPSQX_S_W_PH
= 0x3,
16646 NM_MULSA_W_PH
= 0x6,
16647 NM_EXTRV_RS_W
= 0x7,
16650 /* POOL32Axf_2_24_31 instruction pool */
16652 NM_DPAU_H_QBR
= 0x0,
16653 NM_DPAQX_SA_W_PH
= 0x1,
16654 NM_DPSU_H_QBR
= 0x2,
16655 NM_DPSQX_SA_W_PH
= 0x3,
16658 NM_MULSAQ_S_W_PH
= 0x6,
16659 NM_EXTRV_S_H
= 0x7,
16662 /* POOL32Axf_{4, 5} instruction pool */
16681 /* nanoMIPS DSP instructions */
16682 NM_ABSQ_S_QB
= 0x00,
16683 NM_ABSQ_S_PH
= 0x08,
16684 NM_ABSQ_S_W
= 0x10,
16685 NM_PRECEQ_W_PHL
= 0x28,
16686 NM_PRECEQ_W_PHR
= 0x30,
16687 NM_PRECEQU_PH_QBL
= 0x38,
16688 NM_PRECEQU_PH_QBR
= 0x48,
16689 NM_PRECEU_PH_QBL
= 0x58,
16690 NM_PRECEU_PH_QBR
= 0x68,
16691 NM_PRECEQU_PH_QBLA
= 0x39,
16692 NM_PRECEQU_PH_QBRA
= 0x49,
16693 NM_PRECEU_PH_QBLA
= 0x59,
16694 NM_PRECEU_PH_QBRA
= 0x69,
16695 NM_REPLV_PH
= 0x01,
16696 NM_REPLV_QB
= 0x09,
16699 NM_RADDU_W_QB
= 0x78,
16705 /* PP.SR instruction pool */
16709 NM_RESTORE_JRC
= 0x03,
16712 /* P.SR.F instruction pool */
16715 NM_RESTOREF
= 0x01,
16718 /* P16.SYSCALL instruction pool */
16720 NM_SYSCALL16
= 0x00,
16721 NM_HYPCALL16
= 0x01,
16724 /* POOL16C_00 instruction pool */
16732 /* PP.LSX and PP.LSXS instruction pool */
16770 /* ERETx instruction pool */
16776 /* POOL32FxF_{0, 1} insturction pool */
16785 NM_CVT_S_PL
= 0x84,
16786 NM_CVT_S_PU
= 0xa4,
16788 NM_CVT_L_S
= 0x004,
16789 NM_CVT_L_D
= 0x104,
16790 NM_CVT_W_S
= 0x024,
16791 NM_CVT_W_D
= 0x124,
16793 NM_RSQRT_S
= 0x008,
16794 NM_RSQRT_D
= 0x108,
16799 NM_RECIP_S
= 0x048,
16800 NM_RECIP_D
= 0x148,
16802 NM_FLOOR_L_S
= 0x00c,
16803 NM_FLOOR_L_D
= 0x10c,
16805 NM_FLOOR_W_S
= 0x02c,
16806 NM_FLOOR_W_D
= 0x12c,
16808 NM_CEIL_L_S
= 0x04c,
16809 NM_CEIL_L_D
= 0x14c,
16810 NM_CEIL_W_S
= 0x06c,
16811 NM_CEIL_W_D
= 0x16c,
16812 NM_TRUNC_L_S
= 0x08c,
16813 NM_TRUNC_L_D
= 0x18c,
16814 NM_TRUNC_W_S
= 0x0ac,
16815 NM_TRUNC_W_D
= 0x1ac,
16816 NM_ROUND_L_S
= 0x0cc,
16817 NM_ROUND_L_D
= 0x1cc,
16818 NM_ROUND_W_S
= 0x0ec,
16819 NM_ROUND_W_D
= 0x1ec,
16827 NM_CVT_D_S
= 0x04d,
16828 NM_CVT_D_W
= 0x0cd,
16829 NM_CVT_D_L
= 0x14d,
16830 NM_CVT_S_D
= 0x06d,
16831 NM_CVT_S_W
= 0x0ed,
16832 NM_CVT_S_L
= 0x16d,
16835 /* P.LL instruction pool */
16841 /* P.SC instruction pool */
16847 /* P.DVP instruction pool */
/*
 * nanoMIPS decoding engine
 */
/* extraction utilities */

#define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
#define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };

    return map[r & 0x7];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = { 8, 9, 10, 11, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}

/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = { 8, 9, 10, 0, 4, 5, 6, 7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
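/*
 * For example, an encoded 3-bit register value of 0 decodes to $16 via
 * decode_gpr_gpr3() but to $0 via decode_gpr_gpr3_src_store(), matching
 * the nanoMIPS 'gpr3' and 'gpr3.src.store' mappings above.
 */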
16905 /* extraction utilities */
16907 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
16908 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
16909 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
16910 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
16911 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
16912 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}
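/*
 * nanoMIPS SAVE/RESTORE: gen_save() stores 'count' GPRs (optionally
 * ending with $gp) below the stack pointer and then decrements SP by u;
 * gen_restore() reloads them and increments SP by u.
 */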
static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
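/*
 * 16-bit nanoMIPS register-register logic pool: NOT/AND/XOR/OR on the
 * 3-bit-encoded registers rt and rs.
 */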
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
    case NM_NOT16:
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case NM_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case NM_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case NM_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    }
}
static void gen_pool32a0_nanomips_insn(CPUMIPSState *env, DisasContext *ctx)
{
    int rt = extract32(ctx->opcode, 21, 5);
    int rs = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);

    switch (extract32(ctx->opcode, 3, 7)) {
17000 switch (extract32(ctx
->opcode
, 10, 1)) {
17002 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
17005 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
17010 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
17013 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
17016 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
17019 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
17022 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
17025 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
17028 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
17031 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
17034 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
17037 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
17040 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
17043 switch (extract32(ctx
->opcode
, 10, 1)) {
17045 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
17048 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
17053 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
17056 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
17059 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
17062 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
17065 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
17070 #ifndef CONFIG_USER_ONLY
17071 TCGv t0
= tcg_temp_new();
17072 switch (extract32(ctx
->opcode
, 10, 1)) {
17075 check_cp0_enabled(ctx
);
17076 gen_helper_dvp(t0
, cpu_env
);
17077 gen_store_gpr(t0
, rt
);
17082 check_cp0_enabled(ctx
);
17083 gen_helper_evp(t0
, cpu_env
);
17084 gen_store_gpr(t0
, rt
);
17091 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_temp_new();

            gen_load_gpr(t1, rs);
            gen_load_gpr(t2, rt);
            tcg_gen_add_tl(t0, t1, t2);
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_xor_tl(t1, t1, t2);
            tcg_gen_xor_tl(t2, t0, t2);
            tcg_gen_andc_tl(t1, t2, t1);

            /* operands of same sign, result different sign */
            tcg_gen_setcondi_tl(TCG_COND_LT, t0, t1, 0);
            gen_store_gpr(t0, rd);
        }
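            /*
             * Signed-overflow test for the 32-bit add above: t1 = rs ^ rt
             * has its sign bit set only when the operands differ in sign,
             * t2 = result ^ rt when the result differs in sign from rt,
             * so andc(t2, t1) is negative exactly when two same-sign
             * operands produced a result of the opposite sign.  The
             * setcond then writes 1 to rd on overflow and 0 otherwise.
             */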
17118 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
17121 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
17124 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
17127 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
17130 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
17133 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
17136 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
17139 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
17141 #ifndef CONFIG_USER_ONLY
17143 check_cp0_enabled(ctx
);
17145 /* Treat as NOP. */
17148 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
17151 check_cp0_enabled(ctx
);
17153 TCGv t0
= tcg_temp_new();
17155 gen_load_gpr(t0
, rt
);
17156 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
17160 case NM_D_E_MT_VPE
:
17162 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
17163 TCGv t0
= tcg_temp_new();
17170 gen_helper_dmt(t0
);
17171 gen_store_gpr(t0
, rt
);
17172 } else if (rs
== 0) {
17175 gen_helper_dvpe(t0
, cpu_env
);
17176 gen_store_gpr(t0
, rt
);
17178 generate_exception_end(ctx
, EXCP_RI
);
17185 gen_helper_emt(t0
);
17186 gen_store_gpr(t0
, rt
);
17187 } else if (rs
== 0) {
17190 gen_helper_evpe(t0
, cpu_env
);
17191 gen_store_gpr(t0
, rt
);
17193 generate_exception_end(ctx
, EXCP_RI
);
17204 TCGv t0
= tcg_temp_new();
17205 TCGv t1
= tcg_temp_new();
17207 gen_load_gpr(t0
, rt
);
17208 gen_load_gpr(t1
, rs
);
17209 gen_helper_fork(t0
, t1
);
17216 check_cp0_enabled(ctx
);
17218 /* Treat as NOP. */
17221 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
17222 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
17226 check_cp0_enabled(ctx
);
17227 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
17228 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
17233 TCGv t0
= tcg_temp_new();
17235 gen_load_gpr(t0
, rs
);
17236 gen_helper_yield(t0
, cpu_env
, t0
);
17237 gen_store_gpr(t0
, rt
);
17243 generate_exception_end(ctx
, EXCP_RI
);
17249 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17250 int ret
, int v1
, int v2
)
17256 t0
= tcg_temp_new_i32();
17258 v0_t
= tcg_temp_new();
17259 v1_t
= tcg_temp_new();
17261 tcg_gen_movi_i32(t0
, v2
>> 3);
17263 gen_load_gpr(v0_t
, ret
);
17264 gen_load_gpr(v1_t
, v1
);
17267 case NM_MAQ_S_W_PHR
:
17269 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
17271 case NM_MAQ_S_W_PHL
:
17273 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
17275 case NM_MAQ_SA_W_PHR
:
17277 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
17279 case NM_MAQ_SA_W_PHL
:
17281 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
17284 generate_exception_end(ctx
, EXCP_RI
);
17288 tcg_temp_free_i32(t0
);
17290 tcg_temp_free(v0_t
);
17291 tcg_temp_free(v1_t
);
17295 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17296 int ret
, int v1
, int v2
)
17299 TCGv t0
= tcg_temp_new();
17300 TCGv t1
= tcg_temp_new();
17301 TCGv v0_t
= tcg_temp_new();
17303 gen_load_gpr(v0_t
, v1
);
17306 case NM_POOL32AXF_1_0
:
17308 switch (extract32(ctx
->opcode
, 12, 2)) {
17310 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
17313 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
17316 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
17319 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
17323 case NM_POOL32AXF_1_1
:
17325 switch (extract32(ctx
->opcode
, 12, 2)) {
17327 tcg_gen_movi_tl(t0
, v2
);
17328 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
17331 tcg_gen_movi_tl(t0
, v2
>> 3);
17332 gen_helper_shilo(t0
, v0_t
, cpu_env
);
17335 generate_exception_end(ctx
, EXCP_RI
);
17339 case NM_POOL32AXF_1_3
:
17341 imm
= extract32(ctx
->opcode
, 14, 7);
17342 switch (extract32(ctx
->opcode
, 12, 2)) {
17344 tcg_gen_movi_tl(t0
, imm
);
17345 gen_helper_rddsp(t0
, t0
, cpu_env
);
17346 gen_store_gpr(t0
, ret
);
17349 gen_load_gpr(t0
, ret
);
17350 tcg_gen_movi_tl(t1
, imm
);
17351 gen_helper_wrdsp(t0
, t1
, cpu_env
);
17354 tcg_gen_movi_tl(t0
, v2
>> 3);
17355 tcg_gen_movi_tl(t1
, v1
);
17356 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
17357 gen_store_gpr(t0
, ret
);
17360 tcg_gen_movi_tl(t0
, v2
>> 3);
17361 tcg_gen_movi_tl(t1
, v1
);
17362 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
17363 gen_store_gpr(t0
, ret
);
17367 case NM_POOL32AXF_1_4
:
17369 tcg_gen_movi_tl(t0
, v2
>> 2);
17370 switch (extract32(ctx
->opcode
, 12, 1)) {
17372 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
17373 gen_store_gpr(t0
, ret
);
17376 gen_helper_shrl_qb(t0
, t0
, v0_t
);
17377 gen_store_gpr(t0
, ret
);
17381 case NM_POOL32AXF_1_5
:
17382 opc
= extract32(ctx
->opcode
, 12, 2);
17383 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
17385 case NM_POOL32AXF_1_7
:
17387 tcg_gen_movi_tl(t0
, v2
>> 3);
17388 tcg_gen_movi_tl(t1
, v1
);
17389 switch (extract32(ctx
->opcode
, 12, 2)) {
17391 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
17392 gen_store_gpr(t0
, ret
);
17395 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
17396 gen_store_gpr(t0
, ret
);
17399 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
17400 gen_store_gpr(t0
, ret
);
17403 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
17404 gen_store_gpr(t0
, ret
);
17409 generate_exception_end(ctx
, EXCP_RI
);
17415 tcg_temp_free(v0_t
);
17418 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
17419 TCGv v0
, TCGv v1
, int rd
)
17423 t0
= tcg_temp_new_i32();
17425 tcg_gen_movi_i32(t0
, rd
>> 3);
17428 case NM_POOL32AXF_2_0_7
:
17429 switch (extract32(ctx
->opcode
, 9, 3)) {
17432 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
17434 case NM_DPAQ_S_W_PH
:
17436 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17440 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
17442 case NM_DPSQ_S_W_PH
:
17444 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17447 generate_exception_end(ctx
, EXCP_RI
);
17451 case NM_POOL32AXF_2_8_15
:
17452 switch (extract32(ctx
->opcode
, 9, 3)) {
17455 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
17457 case NM_DPAQ_SA_L_W
:
17459 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
17463 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
17465 case NM_DPSQ_SA_L_W
:
17467 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
17470 generate_exception_end(ctx
, EXCP_RI
);
17474 case NM_POOL32AXF_2_16_23
:
17475 switch (extract32(ctx
->opcode
, 9, 3)) {
17476 case NM_DPAU_H_QBL
:
17478 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
17480 case NM_DPAQX_S_W_PH
:
17482 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
17484 case NM_DPSU_H_QBL
:
17486 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
17488 case NM_DPSQX_S_W_PH
:
17490 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
17492 case NM_MULSA_W_PH
:
17494 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
17497 generate_exception_end(ctx
, EXCP_RI
);
17501 case NM_POOL32AXF_2_24_31
:
17502 switch (extract32(ctx
->opcode
, 9, 3)) {
17503 case NM_DPAU_H_QBR
:
17505 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
17507 case NM_DPAQX_SA_W_PH
:
17509 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
17511 case NM_DPSU_H_QBR
:
17513 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
17515 case NM_DPSQX_SA_W_PH
:
17517 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
17519 case NM_MULSAQ_S_W_PH
:
17521 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
17524 generate_exception_end(ctx
, EXCP_RI
);
17529 generate_exception_end(ctx
, EXCP_RI
);
17533 tcg_temp_free_i32(t0
);
17536 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
17537 int rt
, int rs
, int rd
)
17540 TCGv t0
= tcg_temp_new();
17541 TCGv t1
= tcg_temp_new();
17542 TCGv v0_t
= tcg_temp_new();
17543 TCGv v1_t
= tcg_temp_new();
17545 gen_load_gpr(v0_t
, rt
);
17546 gen_load_gpr(v1_t
, rs
);
17549 case NM_POOL32AXF_2_0_7
:
17550 switch (extract32(ctx
->opcode
, 9, 3)) {
17552 case NM_DPAQ_S_W_PH
:
17554 case NM_DPSQ_S_W_PH
:
17555 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17560 gen_load_gpr(t0
, rs
);
17562 if (rd
!= 0 && rd
!= 2) {
17563 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
17564 tcg_gen_ext32u_tl(t0
, t0
);
17565 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
17566 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
17568 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
        {
            int acc = extract32(ctx->opcode, 14, 2);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i64 t3 = tcg_temp_new_i64();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            tcg_gen_ext_tl_i64(t2, t0);
            tcg_gen_ext_tl_i64(t3, t1);
            tcg_gen_mul_i64(t2, t2, t3);
            tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
            tcg_gen_add_i64(t2, t2, t3);
            tcg_temp_free_i64(t3);
            gen_move_low32(cpu_LO[acc], t2);
            gen_move_high32(cpu_HI[acc], t2);
            tcg_temp_free_i64(t2);
        }
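            /*
             * Multiply-accumulate pattern used throughout this pool: the
             * two GPR operands are sign-extended to 64 bits, multiplied,
             * and the product is added to the {HI, LO} pair of DSP
             * accumulator 'acc' (concat_tl_i64 joins the pair, and
             * gen_move_low32/gen_move_high32 split the sum back out).
             */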
17594 int acc
= extract32(ctx
->opcode
, 14, 2);
17595 TCGv_i32 t2
= tcg_temp_new_i32();
17596 TCGv_i32 t3
= tcg_temp_new_i32();
17598 gen_load_gpr(t0
, rs
);
17599 gen_load_gpr(t1
, rt
);
17600 tcg_gen_trunc_tl_i32(t2
, t0
);
17601 tcg_gen_trunc_tl_i32(t3
, t1
);
17602 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
17603 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
17604 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
17605 tcg_temp_free_i32(t2
);
17606 tcg_temp_free_i32(t3
);
17611 gen_load_gpr(v1_t
, rs
);
17612 tcg_gen_movi_tl(t0
, rd
>> 3);
17613 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
17614 gen_store_gpr(t0
, ret
);
17618 case NM_POOL32AXF_2_8_15
:
17619 switch (extract32(ctx
->opcode
, 9, 3)) {
17621 case NM_DPAQ_SA_L_W
:
17623 case NM_DPSQ_SA_L_W
:
17624 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17629 int acc
= extract32(ctx
->opcode
, 14, 2);
17630 TCGv_i64 t2
= tcg_temp_new_i64();
17631 TCGv_i64 t3
= tcg_temp_new_i64();
17633 gen_load_gpr(t0
, rs
);
17634 gen_load_gpr(t1
, rt
);
17635 tcg_gen_ext32u_tl(t0
, t0
);
17636 tcg_gen_ext32u_tl(t1
, t1
);
17637 tcg_gen_extu_tl_i64(t2
, t0
);
17638 tcg_gen_extu_tl_i64(t3
, t1
);
17639 tcg_gen_mul_i64(t2
, t2
, t3
);
17640 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17641 tcg_gen_add_i64(t2
, t2
, t3
);
17642 tcg_temp_free_i64(t3
);
17643 gen_move_low32(cpu_LO
[acc
], t2
);
17644 gen_move_high32(cpu_HI
[acc
], t2
);
17645 tcg_temp_free_i64(t2
);
17651 int acc
= extract32(ctx
->opcode
, 14, 2);
17652 TCGv_i32 t2
= tcg_temp_new_i32();
17653 TCGv_i32 t3
= tcg_temp_new_i32();
17655 gen_load_gpr(t0
, rs
);
17656 gen_load_gpr(t1
, rt
);
17657 tcg_gen_trunc_tl_i32(t2
, t0
);
17658 tcg_gen_trunc_tl_i32(t3
, t1
);
17659 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
17660 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
17661 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
17662 tcg_temp_free_i32(t2
);
17663 tcg_temp_free_i32(t3
);
17668 tcg_gen_movi_tl(t0
, rd
>> 3);
17669 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
17670 gen_store_gpr(t0
, ret
);
17673 generate_exception_end(ctx
, EXCP_RI
);
17677 case NM_POOL32AXF_2_16_23
:
17678 switch (extract32(ctx
->opcode
, 9, 3)) {
17679 case NM_DPAU_H_QBL
:
17680 case NM_DPAQX_S_W_PH
:
17681 case NM_DPSU_H_QBL
:
17682 case NM_DPSQX_S_W_PH
:
17683 case NM_MULSA_W_PH
:
17684 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17688 tcg_gen_movi_tl(t0
, rd
>> 3);
17689 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
17690 gen_store_gpr(t0
, ret
);
17695 int acc
= extract32(ctx
->opcode
, 14, 2);
17696 TCGv_i64 t2
= tcg_temp_new_i64();
17697 TCGv_i64 t3
= tcg_temp_new_i64();
17699 gen_load_gpr(t0
, rs
);
17700 gen_load_gpr(t1
, rt
);
17701 tcg_gen_ext_tl_i64(t2
, t0
);
17702 tcg_gen_ext_tl_i64(t3
, t1
);
17703 tcg_gen_mul_i64(t2
, t2
, t3
);
17704 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17705 tcg_gen_sub_i64(t2
, t3
, t2
);
17706 tcg_temp_free_i64(t3
);
17707 gen_move_low32(cpu_LO
[acc
], t2
);
17708 gen_move_high32(cpu_HI
[acc
], t2
);
17709 tcg_temp_free_i64(t2
);
17712 case NM_EXTRV_RS_W
:
17714 tcg_gen_movi_tl(t0
, rd
>> 3);
17715 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
17716 gen_store_gpr(t0
, ret
);
17720 case NM_POOL32AXF_2_24_31
:
17721 switch (extract32(ctx
->opcode
, 9, 3)) {
17722 case NM_DPAU_H_QBR
:
17723 case NM_DPAQX_SA_W_PH
:
17724 case NM_DPSU_H_QBR
:
17725 case NM_DPSQX_SA_W_PH
:
17726 case NM_MULSAQ_S_W_PH
:
17727 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
17731 tcg_gen_movi_tl(t0
, rd
>> 3);
17732 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
17733 gen_store_gpr(t0
, ret
);
17738 int acc
= extract32(ctx
->opcode
, 14, 2);
17739 TCGv_i64 t2
= tcg_temp_new_i64();
17740 TCGv_i64 t3
= tcg_temp_new_i64();
17742 gen_load_gpr(t0
, rs
);
17743 gen_load_gpr(t1
, rt
);
17744 tcg_gen_ext32u_tl(t0
, t0
);
17745 tcg_gen_ext32u_tl(t1
, t1
);
17746 tcg_gen_extu_tl_i64(t2
, t0
);
17747 tcg_gen_extu_tl_i64(t3
, t1
);
17748 tcg_gen_mul_i64(t2
, t2
, t3
);
17749 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
17750 tcg_gen_sub_i64(t2
, t3
, t2
);
17751 tcg_temp_free_i64(t3
);
17752 gen_move_low32(cpu_LO
[acc
], t2
);
17753 gen_move_high32(cpu_HI
[acc
], t2
);
17754 tcg_temp_free_i64(t2
);
17759 tcg_gen_movi_tl(t0
, rd
>> 3);
17760 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
17761 gen_store_gpr(t0
, ret
);
17766 generate_exception_end(ctx
, EXCP_RI
);
17773 tcg_temp_free(v0_t
);
17774 tcg_temp_free(v1_t
);
17778 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
17780 int rt
= extract32(ctx
->opcode
, 21, 5);
17781 int rs
= extract32(ctx
->opcode
, 16, 5);
17782 int rd
= extract32(ctx
->opcode
, 11, 5);
17784 switch (extract32(ctx
->opcode
, 6, 3)) {
17785 case NM_POOL32AXF_1
:
17787 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
17788 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
17791 case NM_POOL32AXF_2
:
17793 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
17794 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
17797 case NM_POOL32AXF_4
:
17799 case NM_POOL32AXF_5
:
17800 switch (extract32(ctx
->opcode
, 9, 7)) {
17801 #ifndef CONFIG_USER_ONLY
17803 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
17806 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
17809 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
17812 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
17815 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
17818 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
17821 check_cp0_enabled(ctx
);
17823 TCGv t0
= tcg_temp_new();
17825 save_cpu_state(ctx
, 1);
17826 gen_helper_di(t0
, cpu_env
);
17827 gen_store_gpr(t0
, rt
);
17828 /* Stop translation as we may have switched the execution mode */
17829 ctx
->base
.is_jmp
= DISAS_STOP
;
17834 check_cp0_enabled(ctx
);
17836 TCGv t0
= tcg_temp_new();
17838 save_cpu_state(ctx
, 1);
17839 gen_helper_ei(t0
, cpu_env
);
17840 gen_store_gpr(t0
, rt
);
17841 /* Stop translation as we may have switched the execution mode */
17842 ctx
->base
.is_jmp
= DISAS_STOP
;
17847 gen_load_srsgpr(rs
, rt
);
17850 gen_store_srsgpr(rs
, rt
);
17853 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
17856 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
17859 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
17863 generate_exception_end(ctx
, EXCP_RI
);
17867 case NM_POOL32AXF_7
:
17870 generate_exception_end(ctx
, EXCP_RI
);
/* Immediate Value Compact Branches */
static void gen_compute_imm_branch(DisasContext *ctx, uint32_t opc,
                                   int rt, int32_t imm, int32_t offset)
{
    TCGCond cond = TCG_COND_EQ;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rt);
    tcg_gen_movi_tl(t1, imm);
    ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    /* Load needed operands and calculate btarget */
17891 if (rt
== 0 && imm
== 0) {
17892 /* Unconditional branch */
17893 } else if (rt
== 0 && imm
!= 0) {
17898 cond
= TCG_COND_EQ
;
17903 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
17904 generate_exception_end(ctx
, EXCP_RI
);
17906 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
17907 /* Unconditional branch */
17908 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
17912 tcg_gen_shri_tl(t0
, t0
, imm
);
17913 tcg_gen_andi_tl(t0
, t0
, 1);
17914 tcg_gen_movi_tl(t1
, 0);
17916 if (opc
== NM_BBEQZC
) {
17917 cond
= TCG_COND_EQ
;
17919 cond
= TCG_COND_NE
;
17924 if (rt
== 0 && imm
== 0) {
17927 } else if (rt
== 0 && imm
!= 0) {
17928 /* Unconditional branch */
17931 cond
= TCG_COND_NE
;
17935 if (rt
== 0 && imm
== 0) {
17936 /* Unconditional branch */
17939 cond
= TCG_COND_GE
;
17944 cond
= TCG_COND_LT
;
17947 if (rt
== 0 && imm
== 0) {
17948 /* Unconditional branch */
17951 cond
= TCG_COND_GEU
;
17956 cond
= TCG_COND_LTU
;
17959 MIPS_INVAL("Immediate Value Compact branch");
17960 generate_exception_end(ctx
, EXCP_RI
);
    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        gen_goto_tb(ctx, 0, ctx->btarget);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();

        tcg_gen_brcond_tl(tcg_invert_cond(cond), t0, t1, fs);

        gen_goto_tb(ctx, 1, ctx->btarget);

        gen_set_label(fs);
        gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
    }
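
    /*
     * Conditional compact branches are emitted as two exits: the brcond
     * on the inverted condition skips over the taken path, so goto_tb
     * slot 1 chains to the branch target and slot 0 chains to the
     * fall-through instruction, letting both outcomes link directly to
     * their successor TBs.
     */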
/* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
static void gen_compute_nanomips_pbalrsc_branch(DisasContext *ctx, int rs,
                                                int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);

    if (rt != 0) {
        tcg_gen_movi_tl(cpu_gpr[rt], ctx->base.pc_next + 4);
    }

    /* calculate btarget */
    tcg_gen_shli_tl(t0, t0, 1);
    tcg_gen_movi_tl(t1, ctx->base.pc_next + 4);
    gen_op_addr_add(ctx, btarget, t1, t0);

    /* unconditional branch to register */
    tcg_gen_mov_tl(cpu_PC, btarget);
    tcg_gen_lookup_and_goto_ptr();

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
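
/*
 * For BALRSC/BRSC the target is the address of the next instruction plus
 * GPR[rs] shifted left by one (nanoMIPS targets are halfword aligned),
 * with rt, when non-zero, receiving the return address.  The indirect
 * jump goes through tcg_gen_lookup_and_goto_ptr() so the destination TB
 * can be looked up without leaving the translated-code loop.
 */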
18012 /* nanoMIPS Branches */
18013 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
18014 int rs
, int rt
, int32_t offset
)
18016 int bcond_compute
= 0;
18017 TCGv t0
= tcg_temp_new();
18018 TCGv t1
= tcg_temp_new();
18020 /* Load needed operands and calculate btarget */
18022 /* compact branch */
18025 gen_load_gpr(t0
, rs
);
18026 gen_load_gpr(t1
, rt
);
18028 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18032 if (rs
== 0 || rs
== rt
) {
18033 /* OPC_BLEZALC, OPC_BGEZALC */
18034 /* OPC_BGTZALC, OPC_BLTZALC */
18035 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
18037 gen_load_gpr(t0
, rs
);
18038 gen_load_gpr(t1
, rt
);
18040 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18043 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18047 /* OPC_BEQZC, OPC_BNEZC */
18048 gen_load_gpr(t0
, rs
);
18050 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
18052 /* OPC_JIC, OPC_JIALC */
18053 TCGv tbase
= tcg_temp_new();
18054 TCGv toffset
= tcg_temp_new();
18056 gen_load_gpr(tbase
, rt
);
18057 tcg_gen_movi_tl(toffset
, offset
);
18058 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
18059 tcg_temp_free(tbase
);
18060 tcg_temp_free(toffset
);
18064 MIPS_INVAL("Compact branch/jump");
18065 generate_exception_end(ctx
, EXCP_RI
);
    if (bcond_compute == 0) {
        /* Unconditional compact branch */
18073 gen_goto_tb(ctx
, 0, ctx
->btarget
);
18076 MIPS_INVAL("Compact branch/jump");
18077 generate_exception_end(ctx
, EXCP_RI
);
18081 /* Conditional compact branch */
18082 TCGLabel
*fs
= gen_new_label();
18086 if (rs
== 0 && rt
!= 0) {
18088 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
18089 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18091 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
18094 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
18098 if (rs
== 0 && rt
!= 0) {
18100 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
18101 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18103 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
18106 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
18110 if (rs
== 0 && rt
!= 0) {
18112 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
18113 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18115 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
18118 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
18122 if (rs
== 0 && rt
!= 0) {
18124 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
18125 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
18127 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
18130 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
18134 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
18137 MIPS_INVAL("Compact conditional branch/jump");
18138 generate_exception_end(ctx
, EXCP_RI
);
18142 /* Generating branch here as compact branches don't have delay slot */
18143 gen_goto_tb(ctx
, 1, ctx
->btarget
);
18146 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
/* nanoMIPS CP1 Branches */
static void gen_compute_branch_cp1_nm(DisasContext *ctx, uint32_t op,
                                      int32_t ft, int32_t offset)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    case OPC_BC1NEZ:
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

out:
    tcg_temp_free_i64(t0);
}
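
/*
 * BC1EQZ/BC1NEZ test bit 0 of FPR[ft]; the xori in the EQZ case inverts
 * that bit so 'bcond' ends up non-zero exactly when the branch is taken,
 * and MIPS_HFLAG_BC marks a pending conditional branch whose target is
 * kept in ctx->btarget.
 */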
18191 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
18194 t0
= tcg_temp_new();
18195 t1
= tcg_temp_new();
18197 gen_load_gpr(t0
, rs
);
18198 gen_load_gpr(t1
, rt
);
18200 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
18201 /* PP.LSXS instructions require shifting */
18202 switch (extract32(ctx
->opcode
, 7, 4)) {
18206 tcg_gen_shli_tl(t0
, t0
, 1);
18212 tcg_gen_shli_tl(t0
, t0
, 2);
18216 tcg_gen_shli_tl(t0
, t0
, 3);
18220 gen_op_addr_add(ctx
, t0
, t0
, t1
);
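    /*
     * At this point t0 holds the effective address: for the scaled
     * (PP.LSXS) forms the index loaded from rs was pre-shifted above by
     * the access size (1, 2 or 3) before being added to the base value
     * loaded from rt.
     */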
18222 switch (extract32(ctx
->opcode
, 7, 4)) {
18224 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18226 gen_store_gpr(t0
, rd
);
18230 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18232 gen_store_gpr(t0
, rd
);
18236 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18238 gen_store_gpr(t0
, rd
);
18241 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18243 gen_store_gpr(t0
, rd
);
18247 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
18249 gen_store_gpr(t0
, rd
);
18252 gen_load_gpr(t1
, rd
);
18253 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18258 gen_load_gpr(t1
, rd
);
18259 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18264 gen_load_gpr(t1
, rd
);
18265 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
18269 /*case NM_LWC1XS:*/
18271 /*case NM_LDC1XS:*/
18273 /*case NM_SWC1XS:*/
18275 /*case NM_SDC1XS:*/
18276 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
18277 check_cp1_enabled(ctx
);
18278 switch (extract32(ctx
->opcode
, 7, 4)) {
18280 /*case NM_LWC1XS:*/
18281 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
18284 /*case NM_LDC1XS:*/
18285 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
18288 /*case NM_SWC1XS:*/
18289 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
18292 /*case NM_SDC1XS:*/
18293 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
18297 generate_exception_err(ctx
, EXCP_CpU
, 1);
18301 generate_exception_end(ctx
, EXCP_RI
);
18309 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
18313 rt
= extract32(ctx
->opcode
, 21, 5);
18314 rs
= extract32(ctx
->opcode
, 16, 5);
18315 rd
= extract32(ctx
->opcode
, 11, 5);
18317 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
18318 generate_exception_end(ctx
, EXCP_RI
);
18321 check_cp1_enabled(ctx
);
18322 switch (extract32(ctx
->opcode
, 0, 3)) {
18324 switch (extract32(ctx
->opcode
, 3, 7)) {
18326 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
18329 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
18332 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
18335 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
18338 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
18341 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
18344 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
18347 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
18350 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
18353 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
18356 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
18359 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
18362 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
18365 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
18368 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
18371 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
18374 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
18377 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
18380 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
18383 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
18386 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
18389 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
18392 generate_exception_end(ctx
, EXCP_RI
);
18397 switch (extract32(ctx
->opcode
, 3, 3)) {
18399 switch (extract32(ctx
->opcode
, 9, 1)) {
18401 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
18404 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
18409 switch (extract32(ctx
->opcode
, 9, 1)) {
18411 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
18414 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
18419 switch (extract32(ctx
->opcode
, 9, 1)) {
18421 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
18424 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
18429 switch (extract32(ctx
->opcode
, 9, 1)) {
18431 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
18434 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
18439 switch (extract32(ctx
->opcode
, 6, 8)) {
18441 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
18444 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
18447 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
18450 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
18453 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
18456 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
18459 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
18462 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
18465 switch (extract32(ctx
->opcode
, 6, 9)) {
18467 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
18470 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
18473 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
18476 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
18479 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
18482 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
18485 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
18488 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
18491 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
18494 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
18497 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
18500 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
18503 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
18506 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
18509 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
18512 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
18515 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
18518 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
18521 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
18524 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
18527 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
18530 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
18533 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
18536 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
18539 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
18542 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
18545 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
18548 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
18551 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
18554 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
18557 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
18560 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
18563 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
18566 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
18569 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
18572 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
18575 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
18578 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
18581 generate_exception_end(ctx
, EXCP_RI
);
18590 switch (extract32(ctx
->opcode
, 3, 3)) {
18591 case NM_CMP_CONDN_S
:
18592 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
18594 case NM_CMP_CONDN_D
:
18595 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
18598 generate_exception_end(ctx
, EXCP_RI
);
18603 generate_exception_end(ctx
, EXCP_RI
);
18608 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
18609 int rd
, int rs
, int rt
)
18612 TCGv t0
= tcg_temp_new();
18613 TCGv v1_t
= tcg_temp_new();
18614 TCGv v2_t
= tcg_temp_new();
18616 gen_load_gpr(v1_t
, rs
);
18617 gen_load_gpr(v2_t
, rt
);
18622 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
18626 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
18630 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
18632 case NM_CMPU_EQ_QB
:
18634 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
18636 case NM_CMPU_LT_QB
:
18638 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
18640 case NM_CMPU_LE_QB
:
18642 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
18644 case NM_CMPGU_EQ_QB
:
18646 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
18647 gen_store_gpr(v1_t
, ret
);
18649 case NM_CMPGU_LT_QB
:
18651 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
18652 gen_store_gpr(v1_t
, ret
);
18654 case NM_CMPGU_LE_QB
:
18656 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
18657 gen_store_gpr(v1_t
, ret
);
18659 case NM_CMPGDU_EQ_QB
:
18661 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
18662 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18663 gen_store_gpr(v1_t
, ret
);
18665 case NM_CMPGDU_LT_QB
:
18667 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
18668 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18669 gen_store_gpr(v1_t
, ret
);
18671 case NM_CMPGDU_LE_QB
:
18673 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
18674 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
18675 gen_store_gpr(v1_t
, ret
);
18679 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
18680 gen_store_gpr(v1_t
, ret
);
18684 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18685 gen_store_gpr(v1_t
, ret
);
18689 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18690 gen_store_gpr(v1_t
, ret
);
18694 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18695 gen_store_gpr(v1_t
, ret
);
18699 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18700 gen_store_gpr(v1_t
, ret
);
18704 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
18705 gen_store_gpr(v1_t
, ret
);
18709 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
18710 gen_store_gpr(v1_t
, ret
);
18714 switch (extract32(ctx
->opcode
, 10, 1)) {
18717 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18718 gen_store_gpr(v1_t
, ret
);
18722 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18723 gen_store_gpr(v1_t
, ret
);
18727 case NM_ADDQH_R_PH
:
18729 switch (extract32(ctx
->opcode
, 10, 1)) {
18732 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
18733 gen_store_gpr(v1_t
, ret
);
18737 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
18738 gen_store_gpr(v1_t
, ret
);
18744 switch (extract32(ctx
->opcode
, 10, 1)) {
18747 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
18748 gen_store_gpr(v1_t
, ret
);
18752 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
18753 gen_store_gpr(v1_t
, ret
);
18759 switch (extract32(ctx
->opcode
, 10, 1)) {
18762 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18763 gen_store_gpr(v1_t
, ret
);
18767 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18768 gen_store_gpr(v1_t
, ret
);
18774 switch (extract32(ctx
->opcode
, 10, 1)) {
18777 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18778 gen_store_gpr(v1_t
, ret
);
18782 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18783 gen_store_gpr(v1_t
, ret
);
18787 case NM_ADDUH_R_QB
:
18789 switch (extract32(ctx
->opcode
, 10, 1)) {
18792 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
18793 gen_store_gpr(v1_t
, ret
);
18797 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
18798 gen_store_gpr(v1_t
, ret
);
18802 case NM_SHRAV_R_PH
:
18804 switch (extract32(ctx
->opcode
, 10, 1)) {
18807 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
18808 gen_store_gpr(v1_t
, ret
);
18812 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
18813 gen_store_gpr(v1_t
, ret
);
18817 case NM_SHRAV_R_QB
:
18819 switch (extract32(ctx
->opcode
, 10, 1)) {
18822 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
18823 gen_store_gpr(v1_t
, ret
);
18827 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
18828 gen_store_gpr(v1_t
, ret
);
18834 switch (extract32(ctx
->opcode
, 10, 1)) {
18837 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18838 gen_store_gpr(v1_t
, ret
);
18842 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18843 gen_store_gpr(v1_t
, ret
);
18847 case NM_SUBQH_R_PH
:
18849 switch (extract32(ctx
->opcode
, 10, 1)) {
18852 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
18853 gen_store_gpr(v1_t
, ret
);
18857 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
18858 gen_store_gpr(v1_t
, ret
);
18864 switch (extract32(ctx
->opcode
, 10, 1)) {
18867 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
18868 gen_store_gpr(v1_t
, ret
);
18872 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
18873 gen_store_gpr(v1_t
, ret
);
18879 switch (extract32(ctx
->opcode
, 10, 1)) {
18882 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18883 gen_store_gpr(v1_t
, ret
);
18887 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
18888 gen_store_gpr(v1_t
, ret
);
18894 switch (extract32(ctx
->opcode
, 10, 1)) {
18897 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18898 gen_store_gpr(v1_t
, ret
);
18902 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18903 gen_store_gpr(v1_t
, ret
);
18907 case NM_SUBUH_R_QB
:
18909 switch (extract32(ctx
->opcode
, 10, 1)) {
18912 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
18913 gen_store_gpr(v1_t
, ret
);
18917 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
18918 gen_store_gpr(v1_t
, ret
);
18922 case NM_SHLLV_S_PH
:
18924 switch (extract32(ctx
->opcode
, 10, 1)) {
18927 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18928 gen_store_gpr(v1_t
, ret
);
18932 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18933 gen_store_gpr(v1_t
, ret
);
18937 case NM_PRECR_SRA_R_PH_W
:
18939 switch (extract32(ctx
->opcode
, 10, 1)) {
18941 /* PRECR_SRA_PH_W */
18943 TCGv_i32 sa_t
= tcg_const_i32(rd
);
18944 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
18946 gen_store_gpr(v1_t
, rt
);
18947 tcg_temp_free_i32(sa_t
);
18951 /* PRECR_SRA_R_PH_W */
18953 TCGv_i32 sa_t
= tcg_const_i32(rd
);
18954 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
18956 gen_store_gpr(v1_t
, rt
);
18957 tcg_temp_free_i32(sa_t
);
18962 case NM_MULEU_S_PH_QBL
:
18964 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
18965 gen_store_gpr(v1_t
, ret
);
18967 case NM_MULEU_S_PH_QBR
:
18969 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
18970 gen_store_gpr(v1_t
, ret
);
18972 case NM_MULQ_RS_PH
:
18974 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18975 gen_store_gpr(v1_t
, ret
);
18979 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
18980 gen_store_gpr(v1_t
, ret
);
18984 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18985 gen_store_gpr(v1_t
, ret
);
18989 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
18990 gen_store_gpr(v1_t
, ret
);
18994 gen_load_gpr(t0
, rs
);
18996 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
18998 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19002 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
19003 gen_store_gpr(v1_t
, ret
);
19007 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
19008 gen_store_gpr(v1_t
, ret
);
19012 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
19013 gen_store_gpr(v1_t
, ret
);
19017 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
19018 gen_store_gpr(v1_t
, ret
);
19022 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
19023 gen_store_gpr(v1_t
, ret
);
19027 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19028 gen_store_gpr(v1_t
, ret
);
19033 TCGv tv0
= tcg_temp_new();
19034 TCGv tv1
= tcg_temp_new();
19035 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
19037 tcg_gen_movi_tl(tv0
, rd
>> 3);
19038 tcg_gen_movi_tl(tv1
, imm
);
19039 gen_helper_shilo(tv0
, tv1
, cpu_env
);
19042 case NM_MULEQ_S_W_PHL
:
19044 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
19045 gen_store_gpr(v1_t
, ret
);
19047 case NM_MULEQ_S_W_PHR
:
19049 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
19050 gen_store_gpr(v1_t
, ret
);
19054 switch (extract32(ctx
->opcode
, 10, 1)) {
19057 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19058 gen_store_gpr(v1_t
, ret
);
19062 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19063 gen_store_gpr(v1_t
, ret
);
19067 case NM_PRECR_QB_PH
:
19069 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
19070 gen_store_gpr(v1_t
, ret
);
19072 case NM_PRECRQ_QB_PH
:
19074 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
19075 gen_store_gpr(v1_t
, ret
);
19077 case NM_PRECRQ_PH_W
:
19079 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
19080 gen_store_gpr(v1_t
, ret
);
19082 case NM_PRECRQ_RS_PH_W
:
19084 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
19085 gen_store_gpr(v1_t
, ret
);
19087 case NM_PRECRQU_S_QB_PH
:
19089 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
19090 gen_store_gpr(v1_t
, ret
);
19094 tcg_gen_movi_tl(t0
, rd
);
19095 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
19096 gen_store_gpr(v1_t
, rt
);
19100 tcg_gen_movi_tl(t0
, rd
>> 1);
19101 switch (extract32(ctx
->opcode
, 10, 1)) {
19104 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
19106 gen_store_gpr(v1_t
, rt
);
19109 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
19110 gen_store_gpr(v1_t
, rt
);
19116 tcg_gen_movi_tl(t0
, rd
>> 1);
19117 switch (extract32(ctx
->opcode
, 10, 2)) {
19120 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
19121 gen_store_gpr(v1_t
, rt
);
19125 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
19126 gen_store_gpr(v1_t
, rt
);
19129 generate_exception_end(ctx
, EXCP_RI
);
19135 tcg_gen_movi_tl(t0
, rd
);
19136 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
19137 gen_store_gpr(v1_t
, rt
);
        imm = sextract32(ctx->opcode, 11, 11);
        imm = (int16_t)(imm << 6) >> 6;
        if (rt != 0) {
            tcg_gen_movi_tl(cpu_gpr[rt], dup_const(MO_16, imm));
        }
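        /*
         * dup_const(MO_16, imm) replicates the sign-extended immediate
         * into every 16-bit lane of the constant, so both packed
         * halfwords of rt receive the same value (the halfword-replicate
         * encoding of this pool).
         */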
19151 generate_exception_end(ctx
, EXCP_RI
);
static int decode_nanomips_32_48_opc(CPUMIPSState *env, DisasContext *ctx)
{
    uint16_t insn;
    uint32_t op;
    int rt, rs, rd;
    int offset;
    int imm;

    insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = extract32(ctx->opcode, 21, 5);
    rs = extract32(ctx->opcode, 16, 5);
    rd = extract32(ctx->opcode, 11, 5);

    op = extract32(ctx->opcode, 26, 6);
->opcode
, 19, 2)) {
19179 generate_exception_end(ctx
, EXCP_RI
);
19182 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
19183 generate_exception_end(ctx
, EXCP_SYSCALL
);
19185 generate_exception_end(ctx
, EXCP_RI
);
19189 generate_exception_end(ctx
, EXCP_BREAK
);
19192 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
19193 gen_helper_do_semihosting(cpu_env
);
19195 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
19196 generate_exception_end(ctx
, EXCP_RI
);
19198 generate_exception_end(ctx
, EXCP_DBp
);
19205 imm
= extract32(ctx
->opcode
, 0, 16);
19207 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
19209 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
19211 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19216 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
19217 extract32(ctx
->opcode
, 1, 20) << 1;
19218 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19219 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19223 switch (ctx
->opcode
& 0x07) {
19225 gen_pool32a0_nanomips_insn(env
, ctx
);
19229 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
19230 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
19234 switch (extract32(ctx
->opcode
, 3, 3)) {
19236 gen_p_lsx(ctx
, rd
, rs
, rt
);
            /* In nanoMIPS, the shift field directly encodes the shift
             * amount, meaning that the supported shift values are in
             * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
            gen_lsa(ctx, OPC_LSA, rd, rs, rt,
                    extract32(ctx->opcode, 9, 2) - 1);
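            /*
             * gen_lsa() shifts by one more than the value it is passed
             * (matching the MIPSR6 encoding of shift - 1), so handing it
             * the raw nanoMIPS field minus one gives the direct 0..3
             * shift described above.
             */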
19246 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
19249 gen_pool32axf_nanomips_insn(env
, ctx
);
19252 generate_exception_end(ctx
, EXCP_RI
);
19257 generate_exception_end(ctx
, EXCP_RI
);
19262 switch (ctx
->opcode
& 0x03) {
19265 offset
= extract32(ctx
->opcode
, 0, 21);
19266 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
19270 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
19273 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
19276 generate_exception_end(ctx
, EXCP_RI
);
19282 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
19283 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
19284 switch (extract32(ctx
->opcode
, 16, 5)) {
19287 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
19292 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
19293 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
19298 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
19303 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19306 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19312 t0
= tcg_temp_new();
19314 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19317 tcg_gen_movi_tl(t0
, addr
);
19318 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
19325 t0
= tcg_temp_new();
19326 t1
= tcg_temp_new();
19328 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
19331 tcg_gen_movi_tl(t0
, addr
);
19332 gen_load_gpr(t1
, rt
);
19334 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
19341 generate_exception_end(ctx
, EXCP_RI
);
19347 switch (extract32(ctx
->opcode
, 12, 4)) {
19349 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19352 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19355 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19358 switch (extract32(ctx
->opcode
, 20, 1)) {
19360 switch (ctx
->opcode
& 3) {
19362 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
19363 extract32(ctx
->opcode
, 2, 1),
19364 extract32(ctx
->opcode
, 3, 9) << 3);
19367 case NM_RESTORE_JRC
:
19368 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
19369 extract32(ctx
->opcode
, 2, 1),
19370 extract32(ctx
->opcode
, 3, 9) << 3);
19371 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
19372 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
19376 generate_exception_end(ctx
, EXCP_RI
);
19381 generate_exception_end(ctx
, EXCP_RI
);
19386 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19389 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
19393 TCGv t0
= tcg_temp_new();
19395 imm
= extract32(ctx
->opcode
, 0, 12);
19396 gen_load_gpr(t0
, rs
);
19397 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
19398 gen_store_gpr(t0
, rt
);
19404 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
19405 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
19409 int shift
= extract32(ctx
->opcode
, 0, 5);
19410 switch (extract32(ctx
->opcode
, 5, 4)) {
19412 if (rt
== 0 && shift
== 0) {
19414 } else if (rt
== 0 && shift
== 3) {
19415 /* EHB - treat as NOP */
19416 } else if (rt
== 0 && shift
== 5) {
19417 /* PAUSE - treat as NOP */
19418 } else if (rt
== 0 && shift
== 6) {
19420 gen_sync(extract32(ctx
->opcode
, 16, 5));
19423 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
19424 extract32(ctx
->opcode
, 0, 5));
19428 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
19429 extract32(ctx
->opcode
, 0, 5));
19432 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
19433 extract32(ctx
->opcode
, 0, 5));
19436 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
19437 extract32(ctx
->opcode
, 0, 5));
19444 TCGv t0
= tcg_temp_new();
19445 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
19446 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
19448 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
19450 gen_load_gpr(t0
, rs
);
19451 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
19454 tcg_temp_free_i32(shift
);
19455 tcg_temp_free_i32(shiftx
);
19456 tcg_temp_free_i32(stripe
);
19460 switch (((ctx
->opcode
>> 10) & 2) |
19461 (extract32(ctx
->opcode
, 5, 1))) {
19463 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
19464 extract32(ctx
->opcode
, 6, 5));
19467 generate_exception_end(ctx
, EXCP_RI
);
19472 switch (((ctx
->opcode
>> 10) & 2) |
19473 (extract32(ctx
->opcode
, 5, 1))) {
19475 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
19476 extract32(ctx
->opcode
, 6, 5));
19479 generate_exception_end(ctx
, EXCP_RI
);
19484 generate_exception_end(ctx
, EXCP_RI
);
19489 gen_pool32f_nanomips_insn(ctx
);
19494 switch (extract32(ctx
->opcode
, 1, 1)) {
19497 tcg_gen_movi_tl(cpu_gpr
[rt
],
19498 sextract32(ctx
->opcode
, 0, 1) << 31 |
19499 extract32(ctx
->opcode
, 2, 10) << 21 |
19500 extract32(ctx
->opcode
, 12, 9) << 12);
19505 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
19506 extract32(ctx
->opcode
, 2, 10) << 21 |
19507 extract32(ctx
->opcode
, 12, 9) << 12;
19509 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19510 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
19517 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
19519 switch (extract32(ctx
->opcode
, 18, 3)) {
19521 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
19524 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
19527 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
19531 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
19536 switch (ctx
->opcode
& 1) {
19538 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
19541 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
19547 switch (ctx
->opcode
& 1) {
19549 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
19552 generate_exception_end(ctx
, EXCP_RI
);
19558 switch (ctx
->opcode
& 0x3) {
19560 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
19563 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
19566 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
19569 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
19574 generate_exception_end(ctx
, EXCP_RI
);
19581 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
19583 switch (extract32(ctx
->opcode
, 12, 4)) {
            /* Break the TB to be able to sync copied instructions */
            ctx->base.is_jmp = DISAS_STOP;
19592 /* Treat as NOP. */
19596 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
19599 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
19602 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
19605 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
19608 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
19611 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
19614 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
19617 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
19620 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
19623 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
19626 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
19629 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
19632 generate_exception_end(ctx
, EXCP_RI
);
19639 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
19640 extract32(ctx
->opcode
, 0, 8);
19642 switch (extract32(ctx
->opcode
, 8, 3)) {
19644 switch (extract32(ctx
->opcode
, 11, 4)) {
19646 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
19649 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
19652 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
19655 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
19658 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
19661 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
19664 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
19667 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
19670 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
19673 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
19676 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
19679 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
            /* Break the TB to be able to sync copied instructions */
            ctx->base.is_jmp = DISAS_STOP;
19689 /* Treat as NOP. */
19693 generate_exception_end(ctx
, EXCP_RI
);
19698 switch (extract32(ctx
->opcode
, 11, 4)) {
19702 TCGv t0
= tcg_temp_new();
19703 TCGv t1
= tcg_temp_new();
19705 gen_base_offset_addr(ctx
, t0
, rs
, s
);
19707 switch (extract32(ctx
->opcode
, 11, 4)) {
19709 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
19711 gen_store_gpr(t0
, rt
);
19714 gen_load_gpr(t1
, rt
);
19715 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
19724 switch (ctx
->opcode
& 0x03) {
19726 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
19730 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
19735 switch (ctx
->opcode
& 0x03) {
19737 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
19741 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
19746 check_cp0_enabled(ctx
);
19747 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19748 gen_cache_operation(ctx
, rt
, rs
, s
);
19756 int count
= extract32(ctx
->opcode
, 12, 3);
19759 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
19760 extract32(ctx
->opcode
, 0, 8);
19761 TCGv va
= tcg_temp_new();
19762 TCGv t1
= tcg_temp_new();
19763 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
19764 NM_P_LS_UAWM
? MO_UNALN
: 0;
19766 count
= (count
== 0) ? 8 : count
;
19767 while (counter
!= count
) {
19768 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
19769 int this_offset
= offset
+ (counter
<< 2);
19771 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
19773 switch (extract32(ctx
->opcode
, 11, 1)) {
19775 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
19777 gen_store_gpr(t1
, this_rt
);
19778 if ((this_rt
== rs
) &&
19779 (counter
!= (count
- 1))) {
19780 /* UNPREDICTABLE */
19784 this_rt
= (rt
== 0) ? 0 : this_rt
;
19785 gen_load_gpr(t1
, this_rt
);
19786 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
19797 generate_exception_end(ctx
, EXCP_RI
);
19804 TCGv t0
= tcg_temp_new();
19805 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
19806 extract32(ctx
->opcode
, 1, 20) << 1;
19807 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
19808 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
19809 extract32(ctx
->opcode
, 21, 3));
19810 gen_load_gpr(t0
, rt
);
19811 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
19812 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
19818 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
19819 extract32(ctx
->opcode
, 1, 24) << 1;
19821 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
19823 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
19826 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
19831 switch (extract32(ctx
->opcode
, 12, 4)) {
19834 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
19837 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
19840 generate_exception_end(ctx
, EXCP_RI
);
19846 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
19847 extract32(ctx
->opcode
, 1, 13) << 1;
19848 switch (extract32(ctx
->opcode
, 14, 2)) {
19850 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
19853 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
19854 extract32(ctx
->opcode
, 1, 13) << 1;
19855 check_cp1_enabled(ctx
);
19856 switch (extract32(ctx
->opcode
, 16, 5)) {
19858 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
19861 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
19866 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
19867 extract32(ctx
->opcode
, 0, 1) << 13;
19869 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
19874 generate_exception_end(ctx
, EXCP_RI
);
19880 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
19882 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
19886 if (rs
== rt
|| rt
== 0) {
19887 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
19888 } else if (rs
== 0) {
19889 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
19891 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
19899 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
19900 extract32(ctx
->opcode
, 1, 13) << 1;
19901 switch (extract32(ctx
->opcode
, 14, 2)) {
19903 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
19906 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19908 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
19910 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
19914 if (rs
== 0 || rs
== rt
) {
19916 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
19918 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
19922 generate_exception_end(ctx
, EXCP_RI
);
19929 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
19930 extract32(ctx
->opcode
, 1, 10) << 1;
19931 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
19933 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
19938 generate_exception_end(ctx
, EXCP_RI
);
static int decode_nanomips_opc(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
    int rd = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx->opcode));
    int offset;
    int imm;

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        TCGv tmp = tcg_const_tl(ctx->base.pc_next);

        tcg_gen_st_tl(tmp, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
        tcg_temp_free(tmp);
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = extract32(ctx->opcode, 10, 6);
19965 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
19968 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
19969 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
19972 switch (extract32(ctx
->opcode
, 3, 2)) {
19973 case NM_P16_SYSCALL
:
19974 if (extract32(ctx
->opcode
, 2, 1) == 0) {
19975 generate_exception_end(ctx
, EXCP_SYSCALL
);
19977 generate_exception_end(ctx
, EXCP_RI
);
19981 generate_exception_end(ctx
, EXCP_BREAK
);
19984 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
19985 gen_helper_do_semihosting(cpu_env
);
19987 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
19988 generate_exception_end(ctx
, EXCP_RI
);
19990 generate_exception_end(ctx
, EXCP_DBp
);
19995 generate_exception_end(ctx
, EXCP_RI
);
20002 int shift
= extract32(ctx
->opcode
, 0, 3);
20004 shift
= (shift
== 0) ? 8 : shift
;
20006 switch (extract32(ctx
->opcode
, 3, 1)) {
20014 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
20018 switch (ctx
->opcode
& 1) {
20020 gen_pool16c_nanomips_insn(ctx
);
20023 gen_ldxs(ctx
, rt
, rs
, rd
);
20028 switch (extract32(ctx
->opcode
, 6, 1)) {
20030 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
20031 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
20034 generate_exception_end(ctx
, EXCP_RI
);
20039 switch (extract32(ctx
->opcode
, 3, 1)) {
20041 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
20042 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
20044 case NM_P_ADDIURS5
:
20045 rt
= extract32(ctx
->opcode
, 5, 5);
20047 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
20048 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
20049 (extract32(ctx
->opcode
, 0, 3));
20050 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
20056 switch (ctx
->opcode
& 0x1) {
20058 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
20061 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
20066 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
20067 extract32(ctx
->opcode
, 5, 3);
20068 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
20069 extract32(ctx
->opcode
, 0, 3);
20070 rt
= decode_gpr_gpr4(rt
);
20071 rs
= decode_gpr_gpr4(rs
);
20072 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
20073 (extract32(ctx
->opcode
, 3, 1))) {
20075 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
20078 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
20081 generate_exception_end(ctx
, EXCP_RI
);
20087 int imm
= extract32(ctx
->opcode
, 0, 7);
20088 imm
= (imm
== 0x7f ? -1 : imm
);
20090 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20096 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
20097 u
= (u
== 12) ? 0xff :
20098 (u
== 13) ? 0xffff : u
;
20099 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
20103 offset
= extract32(ctx
->opcode
, 0, 2);
20104 switch (extract32(ctx
->opcode
, 2, 2)) {
20106 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
20109 rt
= decode_gpr_gpr3_src_store(
20110 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
20111 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
20114 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
20117 generate_exception_end(ctx
, EXCP_RI
);
20122 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
20123 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
20125 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
20128 rt
= decode_gpr_gpr3_src_store(
20129 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
20130 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
20133 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
20136 generate_exception_end(ctx
, EXCP_RI
);
20141 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
20142 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
20145 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
20146 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
20147 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
20150 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
20151 extract32(ctx
->opcode
, 5, 3);
20152 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
20153 extract32(ctx
->opcode
, 0, 3);
20154 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
20155 (extract32(ctx
->opcode
, 8, 1) << 2);
20156 rt
= decode_gpr_gpr4(rt
);
20157 rs
= decode_gpr_gpr4(rs
);
20158 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
20161 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
20162 extract32(ctx
->opcode
, 5, 3);
20163 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
20164 extract32(ctx
->opcode
, 0, 3);
20165 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
20166 (extract32(ctx
->opcode
, 8, 1) << 2);
20167 rt
= decode_gpr_gpr4_zero(rt
);
20168 rs
= decode_gpr_gpr4(rs
);
20169 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
20172 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
20173 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
20176 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
20177 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
20178 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
20181 rt
= decode_gpr_gpr3_src_store(
20182 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
20183 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
20184 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
20185 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
20188 rt
= decode_gpr_gpr3_src_store(
20189 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
20190 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
20191 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
20194 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
20195 (sextract32(ctx
->opcode
, 0, 1) << 10) |
20196 (extract32(ctx
->opcode
, 1, 9) << 1));
20199 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
20200 (sextract32(ctx
->opcode
, 0, 1) << 10) |
20201 (extract32(ctx
->opcode
, 1, 9) << 1));
20204 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
20205 (sextract32(ctx
->opcode
, 0, 1) << 7) |
20206 (extract32(ctx
->opcode
, 1, 6) << 1));
20209 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
20210 (sextract32(ctx
->opcode
, 0, 1) << 7) |
20211 (extract32(ctx
->opcode
, 1, 6) << 1));
20214 switch (ctx
->opcode
& 0xf) {
20217 switch (extract32(ctx
->opcode
, 4, 1)) {
20219 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
20220 extract32(ctx
->opcode
, 5, 5), 0, 0);
20223 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
20224 extract32(ctx
->opcode
, 5, 5), 31, 0);
20231 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
20232 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
20233 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
20234 extract32(ctx
->opcode
, 0, 4) << 1);
20241 int count
= extract32(ctx
->opcode
, 0, 4);
20242 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
20244 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
20245 switch (extract32(ctx
->opcode
, 8, 1)) {
20247 gen_save(ctx
, rt
, count
, 0, u
);
20249 case NM_RESTORE_JRC16
:
20250 gen_restore(ctx
, rt
, count
, 0, u
);
20251 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
20259 static const int gpr2reg1
[] = {4, 5, 6, 7};
20260 static const int gpr2reg2
[] = {5, 6, 7, 8};
20262 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
20263 extract32(ctx
->opcode
, 8, 1);
20264 int r1
= gpr2reg1
[rd2
];
20265 int r2
= gpr2reg2
[rd2
];
20266 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
20267 extract32(ctx
->opcode
, 0, 3);
20268 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
20269 extract32(ctx
->opcode
, 5, 3);
20270 TCGv t0
= tcg_temp_new();
20271 TCGv t1
= tcg_temp_new();
20272 if (op
== NM_MOVEP
) {
20275 rs
= decode_gpr_gpr4_zero(r3
);
20276 rt
= decode_gpr_gpr4_zero(r4
);
20278 rd
= decode_gpr_gpr4(r3
);
20279 re
= decode_gpr_gpr4(r4
);
20283 gen_load_gpr(t0
, rs
);
20284 gen_load_gpr(t1
, rt
);
20285 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
20286 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
20292 return decode_nanomips_32_48_opc(env
, ctx
);
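/*
 * Worked example (illustrative only) for the sign_extend(s[3] . s[2:0], 4)
 * computation used in decode_nanomips_opc() above: with opcode bit 4 = 1
 * and bits 2..0 = 0b010, sextract32(opcode, 4, 1) << 3 yields -8 and
 * extract32(opcode, 0, 3) yields 2, so imm = -8 | 2 = -6, i.e. the 4-bit
 * two's-complement value 0b1010.
 */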
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
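/*
 * Illustrative sketch only (the helper name below is invented): the indexed
 * DSP loads above form the effective address as GPR[base] + GPR[offset],
 * with either operand reading as zero when its register number is 0, which
 * is what the gen_load_gpr()/gen_op_addr_add() selection implements.
 */
static inline uint64_t dsp_indexed_ea_sketch(uint64_t base_val, int base,
                                             uint64_t offset_val, int offset)
{
    /* Register 0 always reads as zero on MIPS. */
    if (base == 0) {
        return offset_val;
    } else if (offset == 0) {
        return base_val;
    }
    return base_val + offset_val;
}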
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
        /* Treat as NOP. */
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
            gen_helper_adduh_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_ADDUH_R_QB:
            gen_helper_adduh_r_qb(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_addqh_ph(cpu_gpr[ret], v1_t, v2_t);
        case OPC_ADDQH_R_PH:
            gen_helper_addqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_addqh_w(cpu_gpr[ret], v1_t, v2_t);
        case OPC_ADDQH_R_W:
            gen_helper_addqh_r_w(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_subuh_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SUBUH_R_QB:
            gen_helper_subuh_r_qb(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_subqh_ph(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SUBQH_R_PH:
            gen_helper_subqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_subqh_w(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SUBQH_R_W:
            gen_helper_subqh_r_w(cpu_gpr[ret], v1_t, v2_t);
    case OPC_ABSQ_S_PH_DSP:
        case OPC_ABSQ_S_QB:
            gen_helper_absq_s_qb(cpu_gpr[ret], v2_t, cpu_env);
        case OPC_ABSQ_S_PH:
            gen_helper_absq_s_ph(cpu_gpr[ret], v2_t, cpu_env);
            gen_helper_absq_s_w(cpu_gpr[ret], v2_t, cpu_env);
        case OPC_PRECEQ_W_PHL:
            tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFF0000);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
        case OPC_PRECEQ_W_PHR:
            tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0x0000FFFF);
            tcg_gen_shli_tl(cpu_gpr[ret], cpu_gpr[ret], 16);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
        case OPC_PRECEQU_PH_QBL:
            gen_helper_precequ_ph_qbl(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_PH_QBR:
            gen_helper_precequ_ph_qbr(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_PH_QBLA:
            gen_helper_precequ_ph_qbla(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_PH_QBRA:
            gen_helper_precequ_ph_qbra(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_PH_QBL:
            gen_helper_preceu_ph_qbl(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_PH_QBR:
            gen_helper_preceu_ph_qbr(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_PH_QBLA:
            gen_helper_preceu_ph_qbla(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_PH_QBRA:
            gen_helper_preceu_ph_qbra(cpu_gpr[ret], v2_t);
    case OPC_ADDU_QB_DSP:
            gen_helper_addq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDQ_S_PH:
            gen_helper_addq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDU_S_QB:
            gen_helper_addu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDU_S_PH:
            gen_helper_addu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBQ_S_PH:
            gen_helper_subq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBU_S_QB:
            gen_helper_subu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBU_S_PH:
            gen_helper_subu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addsc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addwc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_modsub(cpu_gpr[ret], v1_t, v2_t);
        case OPC_RADDU_W_QB:
            gen_helper_raddu_w_qb(cpu_gpr[ret], v1_t);
    case OPC_CMPU_EQ_QB_DSP:
        case OPC_PRECR_QB_PH:
            gen_helper_precr_qb_ph(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECRQ_QB_PH:
            gen_helper_precrq_qb_ph(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECR_SRA_PH_W:
            TCGv_i32 sa_t = tcg_const_i32(v2);
            gen_helper_precr_sra_ph_w(cpu_gpr[ret], sa_t, v1_t,
            tcg_temp_free_i32(sa_t);
        case OPC_PRECR_SRA_R_PH_W:
            TCGv_i32 sa_t = tcg_const_i32(v2);
            gen_helper_precr_sra_r_ph_w(cpu_gpr[ret], sa_t, v1_t,
            tcg_temp_free_i32(sa_t);
        case OPC_PRECRQ_PH_W:
            gen_helper_precrq_ph_w(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECRQ_RS_PH_W:
            gen_helper_precrq_rs_ph_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_PRECRQU_S_QB_PH:
            gen_helper_precrqu_s_qb_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
#ifdef TARGET_MIPS64
    case OPC_ABSQ_S_QH_DSP:
        case OPC_PRECEQ_L_PWL:
            tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFFFFFF00000000ull);
        case OPC_PRECEQ_L_PWR:
            tcg_gen_shli_tl(cpu_gpr[ret], v2_t, 32);
        case OPC_PRECEQ_PW_QHL:
            gen_helper_preceq_pw_qhl(cpu_gpr[ret], v2_t);
        case OPC_PRECEQ_PW_QHR:
            gen_helper_preceq_pw_qhr(cpu_gpr[ret], v2_t);
        case OPC_PRECEQ_PW_QHLA:
            gen_helper_preceq_pw_qhla(cpu_gpr[ret], v2_t);
        case OPC_PRECEQ_PW_QHRA:
            gen_helper_preceq_pw_qhra(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_QH_OBL:
            gen_helper_precequ_qh_obl(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_QH_OBR:
            gen_helper_precequ_qh_obr(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_QH_OBLA:
            gen_helper_precequ_qh_obla(cpu_gpr[ret], v2_t);
        case OPC_PRECEQU_QH_OBRA:
            gen_helper_precequ_qh_obra(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_QH_OBL:
            gen_helper_preceu_qh_obl(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_QH_OBR:
            gen_helper_preceu_qh_obr(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_QH_OBLA:
            gen_helper_preceu_qh_obla(cpu_gpr[ret], v2_t);
        case OPC_PRECEU_QH_OBRA:
            gen_helper_preceu_qh_obra(cpu_gpr[ret], v2_t);
        case OPC_ABSQ_S_OB:
            gen_helper_absq_s_ob(cpu_gpr[ret], v2_t, cpu_env);
        case OPC_ABSQ_S_PW:
            gen_helper_absq_s_pw(cpu_gpr[ret], v2_t, cpu_env);
        case OPC_ABSQ_S_QH:
            gen_helper_absq_s_qh(cpu_gpr[ret], v2_t, cpu_env);
    case OPC_ADDU_OB_DSP:
        case OPC_RADDU_L_OB:
            gen_helper_raddu_l_ob(cpu_gpr[ret], v1_t);
            gen_helper_subq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBQ_S_PW:
            gen_helper_subq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBQ_S_QH:
            gen_helper_subq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBU_S_OB:
            gen_helper_subu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SUBU_S_QH:
            gen_helper_subu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_subuh_ob(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SUBUH_R_OB:
            gen_helper_subuh_r_ob(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_addq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDQ_S_PW:
            gen_helper_addq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDQ_S_QH:
            gen_helper_addq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDU_S_OB:
            gen_helper_addu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_addu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_ADDU_S_QH:
            gen_helper_addu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_adduh_ob(cpu_gpr[ret], v1_t, v2_t);
        case OPC_ADDUH_R_OB:
            gen_helper_adduh_r_ob(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPU_EQ_OB_DSP:
        case OPC_PRECR_OB_QH:
            gen_helper_precr_ob_qh(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECR_SRA_QH_PW:
            TCGv_i32 ret_t = tcg_const_i32(ret);
            gen_helper_precr_sra_qh_pw(v2_t, v1_t, v2_t, ret_t);
            tcg_temp_free_i32(ret_t);
        case OPC_PRECR_SRA_R_QH_PW:
            TCGv_i32 sa_v = tcg_const_i32(ret);
            gen_helper_precr_sra_r_qh_pw(v2_t, v1_t, v2_t, sa_v);
            tcg_temp_free_i32(sa_v);
        case OPC_PRECRQ_OB_QH:
            gen_helper_precrq_ob_qh(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECRQ_PW_L:
            gen_helper_precrq_pw_l(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECRQ_QH_PW:
            gen_helper_precrq_qh_pw(cpu_gpr[ret], v1_t, v2_t);
        case OPC_PRECRQ_RS_QH_PW:
            gen_helper_precrq_rs_qh_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_PRECRQU_S_OB_QH:
            gen_helper_precrqu_s_ob_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();
    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
    case OPC_SHLL_QB_DSP:
        op2 = MASK_SHLL_QB(ctx->opcode);
            gen_helper_shll_qb(cpu_gpr[ret], t0, v2_t, cpu_env);
            gen_helper_shll_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_shll_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
            gen_helper_shll_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_SHLL_S_PH:
            gen_helper_shll_s_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
        case OPC_SHLLV_S_PH:
            gen_helper_shll_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_shll_s_w(cpu_gpr[ret], t0, v2_t, cpu_env);
        case OPC_SHLLV_S_W:
            gen_helper_shll_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_shrl_qb(cpu_gpr[ret], t0, v2_t);
            gen_helper_shrl_qb(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_shrl_ph(cpu_gpr[ret], t0, v2_t);
            gen_helper_shrl_ph(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_shra_qb(cpu_gpr[ret], t0, v2_t);
        case OPC_SHRA_R_QB:
            gen_helper_shra_r_qb(cpu_gpr[ret], t0, v2_t);
            gen_helper_shra_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SHRAV_R_QB:
            gen_helper_shra_r_qb(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_shra_ph(cpu_gpr[ret], t0, v2_t);
        case OPC_SHRA_R_PH:
            gen_helper_shra_r_ph(cpu_gpr[ret], t0, v2_t);
            gen_helper_shra_ph(cpu_gpr[ret], v1_t, v2_t);
        case OPC_SHRAV_R_PH:
            gen_helper_shra_r_ph(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_shra_r_w(cpu_gpr[ret], t0, v2_t);
        case OPC_SHRAV_R_W:
            gen_helper_shra_r_w(cpu_gpr[ret], v1_t, v2_t);
        default: /* Invalid */
            MIPS_INVAL("MASK SHLL.QB");
            generate_exception_end(ctx, EXCP_RI);
#ifdef TARGET_MIPS64
    case OPC_SHLL_OB_DSP:
        op2 = MASK_SHLL_OB(ctx->opcode);
            gen_helper_shll_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
            gen_helper_shll_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
        case OPC_SHLL_S_PW:
            gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
        case OPC_SHLLV_S_PW:
            gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
            gen_helper_shll_ob(cpu_gpr[ret], v2_t, t0, cpu_env);
            gen_helper_shll_ob(cpu_gpr[ret], v2_t, v1_t, cpu_env);
            gen_helper_shll_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
            gen_helper_shll_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
        case OPC_SHLL_S_QH:
            gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
        case OPC_SHLLV_S_QH:
            gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
            gen_helper_shra_ob(cpu_gpr[ret], v2_t, t0);
            gen_helper_shra_ob(cpu_gpr[ret], v2_t, v1_t);
        case OPC_SHRA_R_OB:
            gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, t0);
        case OPC_SHRAV_R_OB:
            gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, v1_t);
            gen_helper_shra_pw(cpu_gpr[ret], v2_t, t0);
            gen_helper_shra_pw(cpu_gpr[ret], v2_t, v1_t);
        case OPC_SHRA_R_PW:
            gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, t0);
        case OPC_SHRAV_R_PW:
            gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, v1_t);
            gen_helper_shra_qh(cpu_gpr[ret], v2_t, t0);
            gen_helper_shra_qh(cpu_gpr[ret], v2_t, v1_t);
        case OPC_SHRA_R_QH:
            gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, t0);
        case OPC_SHRAV_R_QH:
            gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, v1_t);
            gen_helper_shrl_ob(cpu_gpr[ret], v2_t, t0);
            gen_helper_shrl_ob(cpu_gpr[ret], v2_t, v1_t);
            gen_helper_shrl_qh(cpu_gpr[ret], v2_t, t0);
            gen_helper_shrl_qh(cpu_gpr[ret], v2_t, v1_t);
        default: /* Invalid */
            MIPS_INVAL("MASK SHLL.OB");
            generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();
    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
            gen_helper_mul_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_mul_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_mulq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULQ_RS_W:
            gen_helper_mulq_rs_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    case OPC_DPA_W_PH_DSP:
        case OPC_DPAU_H_QBL:
            gen_helper_dpau_h_qbl(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAU_H_QBR:
            gen_helper_dpau_h_qbr(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSU_H_QBL:
            gen_helper_dpsu_h_qbl(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSU_H_QBR:
            gen_helper_dpsu_h_qbr(t0, v1_t, v2_t, cpu_env);
            gen_helper_dpa_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAX_W_PH:
            gen_helper_dpax_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAQ_S_W_PH:
            gen_helper_dpaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAQX_S_W_PH:
            gen_helper_dpaqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAQX_SA_W_PH:
            gen_helper_dpaqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
            gen_helper_dps_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSX_W_PH:
            gen_helper_dpsx_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSQ_S_W_PH:
            gen_helper_dpsq_s_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSQX_S_W_PH:
            gen_helper_dpsqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSQX_SA_W_PH:
            gen_helper_dpsqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_MULSAQ_S_W_PH:
            gen_helper_mulsaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
        case OPC_DPAQ_SA_L_W:
            gen_helper_dpaq_sa_l_w(t0, v1_t, v2_t, cpu_env);
        case OPC_DPSQ_SA_L_W:
            gen_helper_dpsq_sa_l_w(t0, v1_t, v2_t, cpu_env);
        case OPC_MAQ_S_W_PHL:
            gen_helper_maq_s_w_phl(t0, v1_t, v2_t, cpu_env);
        case OPC_MAQ_S_W_PHR:
            gen_helper_maq_s_w_phr(t0, v1_t, v2_t, cpu_env);
        case OPC_MAQ_SA_W_PHL:
            gen_helper_maq_sa_w_phl(t0, v1_t, v2_t, cpu_env);
        case OPC_MAQ_SA_W_PHR:
            gen_helper_maq_sa_w_phr(t0, v1_t, v2_t, cpu_env);
        case OPC_MULSA_W_PH:
            gen_helper_mulsa_w_ph(t0, v1_t, v2_t, cpu_env);
#ifdef TARGET_MIPS64
    case OPC_DPAQ_W_QH_DSP:
        int ac = ret & 0x03;
        tcg_gen_movi_i32(t0, ac);
            gen_helper_dmadd(v1_t, v2_t, t0, cpu_env);
            gen_helper_dmaddu(v1_t, v2_t, t0, cpu_env);
            gen_helper_dmsub(v1_t, v2_t, t0, cpu_env);
            gen_helper_dmsubu(v1_t, v2_t, t0, cpu_env);
            gen_helper_dpa_w_qh(v1_t, v2_t, t0, cpu_env);
        case OPC_DPAQ_S_W_QH:
            gen_helper_dpaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
        case OPC_DPAQ_SA_L_PW:
            gen_helper_dpaq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
        case OPC_DPAU_H_OBL:
            gen_helper_dpau_h_obl(v1_t, v2_t, t0, cpu_env);
        case OPC_DPAU_H_OBR:
            gen_helper_dpau_h_obr(v1_t, v2_t, t0, cpu_env);
            gen_helper_dps_w_qh(v1_t, v2_t, t0, cpu_env);
        case OPC_DPSQ_S_W_QH:
            gen_helper_dpsq_s_w_qh(v1_t, v2_t, t0, cpu_env);
        case OPC_DPSQ_SA_L_PW:
            gen_helper_dpsq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
        case OPC_DPSU_H_OBL:
            gen_helper_dpsu_h_obl(v1_t, v2_t, t0, cpu_env);
        case OPC_DPSU_H_OBR:
            gen_helper_dpsu_h_obr(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_L_PWL:
            gen_helper_maq_s_l_pwl(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_L_PWR:
            gen_helper_maq_s_l_pwr(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_W_QHLL:
            gen_helper_maq_s_w_qhll(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_SA_W_QHLL:
            gen_helper_maq_sa_w_qhll(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_W_QHLR:
            gen_helper_maq_s_w_qhlr(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_SA_W_QHLR:
            gen_helper_maq_sa_w_qhlr(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_W_QHRL:
            gen_helper_maq_s_w_qhrl(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_SA_W_QHRL:
            gen_helper_maq_sa_w_qhrl(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_S_W_QHRR:
            gen_helper_maq_s_w_qhrr(v1_t, v2_t, t0, cpu_env);
        case OPC_MAQ_SA_W_QHRR:
            gen_helper_maq_sa_w_qhrr(v1_t, v2_t, t0, cpu_env);
        case OPC_MULSAQ_S_L_PW:
            gen_helper_mulsaq_s_l_pw(v1_t, v2_t, t0, cpu_env);
        case OPC_MULSAQ_S_W_QH:
            gen_helper_mulsaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
    case OPC_ADDU_QB_DSP:
        case OPC_MULEU_S_PH_QBL:
            gen_helper_muleu_s_ph_qbl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEU_S_PH_QBR:
            gen_helper_muleu_s_ph_qbr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULQ_RS_PH:
            gen_helper_mulq_rs_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEQ_S_W_PHL:
            gen_helper_muleq_s_w_phl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEQ_S_W_PHR:
            gen_helper_muleq_s_w_phr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULQ_S_PH:
            gen_helper_mulq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
#ifdef TARGET_MIPS64
    case OPC_ADDU_OB_DSP:
        case OPC_MULEQ_S_PW_QHL:
            gen_helper_muleq_s_pw_qhl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEQ_S_PW_QHR:
            gen_helper_muleq_s_pw_qhr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEU_S_QH_OBL:
            gen_helper_muleu_s_qh_obl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULEU_S_QH_OBR:
            gen_helper_muleu_s_qh_obr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_MULQ_RS_QH:
            gen_helper_mulq_rs_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    tcg_temp_free_i32(t0);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
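/*
 * Illustrative note (not from the original source): for the 64-bit DSP ASE
 * dot-product and multiply-accumulate variants above, the low two bits of
 * the destination field select one of the four HI/LO accumulator pairs
 * (ac0..ac3), which is why the generated code passes ret & 0x03 to the
 * helpers instead of a GPR index.
 */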
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
    case OPC_ABSQ_S_PH_DSP:
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            target_long result;
            imm = (ctx->opcode >> 16) & 0xFF;
            result = (uint32_t)imm << 24 |
                     (uint32_t)imm << 16 |
                     (uint32_t)imm << 8 |
            result = (int32_t)result;
            tcg_gen_movi_tl(cpu_gpr[ret], result);
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            tcg_gen_movi_tl(cpu_gpr[ret], \
                            (target_long)((int32_t)imm << 16 | \
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
#ifdef TARGET_MIPS64
    case OPC_ABSQ_S_QH_DSP:
            imm = (ctx->opcode >> 16) & 0xFF;
            temp = ((uint64_t)imm << 8) | (uint64_t)imm;
            temp = (temp << 16) | temp;
            temp = (temp << 32) | temp;
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            temp = ((target_long)imm << 32) \
                   | ((target_long)imm & 0xFFFFFFFF);
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            temp = ((uint64_t)(uint16_t)imm << 48) |
                   ((uint64_t)(uint16_t)imm << 32) |
                   ((uint64_t)(uint16_t)imm << 16) |
                   (uint64_t)(uint16_t)imm;
            tcg_gen_movi_tl(cpu_gpr[ret], temp);
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32u_i64(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
    tcg_temp_free(val_t);
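/*
 * Worked example (illustrative only) for the immediate replication above
 * (REPL.QB-style): for imm = 0x5A the 8-bit value is copied into all four
 * byte lanes, giving 0x5A5A5A5A, which is then sign-extended to the
 * register width.
 */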
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
    case OPC_CMPU_EQ_QB_DSP:
        case OPC_CMPU_EQ_QB:
            gen_helper_cmpu_eq_qb(v1_t, v2_t, cpu_env);
        case OPC_CMPU_LT_QB:
            gen_helper_cmpu_lt_qb(v1_t, v2_t, cpu_env);
        case OPC_CMPU_LE_QB:
            gen_helper_cmpu_le_qb(v1_t, v2_t, cpu_env);
        case OPC_CMPGU_EQ_QB:
            gen_helper_cmpgu_eq_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPGU_LT_QB:
            gen_helper_cmpgu_lt_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPGU_LE_QB:
            gen_helper_cmpgu_le_qb(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPGDU_EQ_QB:
            gen_helper_cmpgu_eq_qb(t1, v1_t, v2_t);
            tcg_gen_mov_tl(cpu_gpr[ret], t1);
            tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
            tcg_gen_shli_tl(t1, t1, 24);
            tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
        case OPC_CMPGDU_LT_QB:
            gen_helper_cmpgu_lt_qb(t1, v1_t, v2_t);
            tcg_gen_mov_tl(cpu_gpr[ret], t1);
            tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
            tcg_gen_shli_tl(t1, t1, 24);
            tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
        case OPC_CMPGDU_LE_QB:
            gen_helper_cmpgu_le_qb(t1, v1_t, v2_t);
            tcg_gen_mov_tl(cpu_gpr[ret], t1);
            tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
            tcg_gen_shli_tl(t1, t1, 24);
            tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
        case OPC_CMP_EQ_PH:
            gen_helper_cmp_eq_ph(v1_t, v2_t, cpu_env);
        case OPC_CMP_LT_PH:
            gen_helper_cmp_lt_ph(v1_t, v2_t, cpu_env);
        case OPC_CMP_LE_PH:
            gen_helper_cmp_le_ph(v1_t, v2_t, cpu_env);
            gen_helper_pick_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_pick_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_PACKRL_PH:
            gen_helper_packrl_ph(cpu_gpr[ret], v1_t, v2_t);
#ifdef TARGET_MIPS64
    case OPC_CMPU_EQ_OB_DSP:
        case OPC_CMP_EQ_PW:
            gen_helper_cmp_eq_pw(v1_t, v2_t, cpu_env);
        case OPC_CMP_LT_PW:
            gen_helper_cmp_lt_pw(v1_t, v2_t, cpu_env);
        case OPC_CMP_LE_PW:
            gen_helper_cmp_le_pw(v1_t, v2_t, cpu_env);
        case OPC_CMP_EQ_QH:
            gen_helper_cmp_eq_qh(v1_t, v2_t, cpu_env);
        case OPC_CMP_LT_QH:
            gen_helper_cmp_lt_qh(v1_t, v2_t, cpu_env);
        case OPC_CMP_LE_QH:
            gen_helper_cmp_le_qh(v1_t, v2_t, cpu_env);
        case OPC_CMPGDU_EQ_OB:
            gen_helper_cmpgdu_eq_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_CMPGDU_LT_OB:
            gen_helper_cmpgdu_lt_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_CMPGDU_LE_OB:
            gen_helper_cmpgdu_le_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        case OPC_CMPGU_EQ_OB:
            gen_helper_cmpgu_eq_ob(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPGU_LT_OB:
            gen_helper_cmpgu_lt_ob(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPGU_LE_OB:
            gen_helper_cmpgu_le_ob(cpu_gpr[ret], v1_t, v2_t);
        case OPC_CMPU_EQ_OB:
            gen_helper_cmpu_eq_ob(v1_t, v2_t, cpu_env);
        case OPC_CMPU_LT_OB:
            gen_helper_cmpu_lt_ob(v1_t, v2_t, cpu_env);
        case OPC_CMPU_LE_OB:
            gen_helper_cmpu_le_ob(v1_t, v2_t, cpu_env);
        case OPC_PACKRL_PW:
            gen_helper_packrl_pw(cpu_gpr[ret], v1_t, v2_t);
            gen_helper_pick_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_pick_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
            gen_helper_pick_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
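/*
 * Illustrative note (not from the original source): the CMPGDU.* cases
 * above mirror the per-byte comparison result both into the destination
 * GPR and into bits 27..24 of DSPControl, which is what the
 * andi(0xF0FFFFFF)/shli(24)/or sequence on cpu_dspctrl implements.
 */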
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 64 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
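/*
 * Worked example (illustrative only) for the APPEND deposit above: the
 * result places rt, shifted left by sa, above the sa low-order bits of rs.
 * With sa = 4, rt = 0x00000123 and rs = 0xAAAAAAAB the deposit yields
 * (0x123 << 4) | 0xB = 0x0000123B, which is then sign-extended to 32 bits.
 */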
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
    case OPC_EXTR_W_DSP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_w(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_EXTR_RS_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_EXTRV_S_H:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_EXTRV_R_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_EXTRV_RS_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extp(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extp(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extpdp(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
            imm = (ctx->opcode >> 20) & 0x3F;
            tcg_gen_movi_tl(t0, ret);
            tcg_gen_movi_tl(t1, imm);
            gen_helper_shilo(t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, ret);
            gen_helper_shilo(t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, ret);
            gen_helper_mthlip(t0, v1_t, cpu_env);
            imm = (ctx->opcode >> 11) & 0x3FF;
            tcg_gen_movi_tl(t0, imm);
            gen_helper_wrdsp(v1_t, t0, cpu_env);
            imm = (ctx->opcode >> 16) & 0x03FF;
            tcg_gen_movi_tl(t0, imm);
            gen_helper_rddsp(cpu_gpr[ret], t0, cpu_env);
#ifdef TARGET_MIPS64
    case OPC_DEXTR_W_DSP:
            tcg_gen_movi_tl(t0, ret);
            gen_helper_dmthlip(v1_t, t0, cpu_env);
            int shift = (ctx->opcode >> 19) & 0x7F;
            int ac = (ctx->opcode >> 11) & 0x03;
            tcg_gen_movi_tl(t0, shift);
            tcg_gen_movi_tl(t1, ac);
            gen_helper_dshilo(t0, t1, cpu_env);
            int ac = (ctx->opcode >> 11) & 0x03;
            tcg_gen_movi_tl(t0, ac);
            gen_helper_dshilo(v1_t, t0, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTR_R_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTR_RS_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTR_R_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTR_RS_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTR_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
        case OPC_DEXTRV_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_DEXTRV_R_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_DEXTRV_RS_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_DEXTRV_R_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
        case OPC_DEXTRV_RS_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);

/* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    op1 = MASK_SPECIAL(ctx->opcode);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        op2 = MASK_R6_MULDIV(ctx->opcode);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
        gen_cond_move(ctx, op1, rd, rs, rt);
        if (rt == 0 && sa == 1) {
            /* The major opcode and function field are shared with the
             * pre-R6 MFHI/MTHI; the remaining fields must be checked
             * as well. */
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
                generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        if (rt == 0 && sa == 1) {
            /* The major opcode and function field are shared with the
             * pre-R6 MFHI/MTHI; the remaining fields must be checked
             * as well. */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_R6_MULDIV(ctx->opcode);
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_MOVN: /* Conditional move */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
    case OPC_MFHI: /* Move from HI/LO */
        gen_HILO(ctx, op1, rs & 3, rd);
    case OPC_MTLO: /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_muldiv(ctx, op1, 0, rs, rt);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL: /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_SLLV: /* Shifts */
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SLT: /* Set on less than */
        gen_slt(ctx, op1, rd, rs, rt);
    case OPC_AND: /* Logic */
        gen_logic(ctx, op1, rd, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
    case OPC_TGE: /* Traps */
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_0e0i(pmon, sa);
        generate_exception_end(ctx, EXCP_SYSCALL);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS2);
        gen_sync(extract32(ctx->opcode, 6, 5));
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
            decode_opc_special_legacy(env, ctx);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD: /* Multiply and add/sub */
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;
    op1 = MASK_SPECIAL3(ctx->opcode);
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        /* Treat as NOP. */
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        /* Treat as NOP. */
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN_END:
            gen_align(ctx, 32, rd, rs, rt, sa & 3);
            gen_bitswap(ctx, op2, rd, rt);
#if defined(TARGET_MIPS64)
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        check_mips_64(ctx);
        /* Treat as NOP. */
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN_END:
            gen_align(ctx, 64, rd, rs, rt, sa & 7);
            gen_bitswap(ctx, op2, rd, rt);
    default: /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
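/*
 * Worked example (illustrative only) for the 9-bit signed offset used in
 * decode_opc_special3_r6() above: imm = (int16_t)ctx->opcode >> 7 treats
 * opcode bits 15..7 as a signed 9-bit field; e.g. a low halfword of 0xff80
 * gives (int16_t)0xff80 = -128, and -128 >> 7 = -1.
 */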
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    op1 = MASK_SPECIAL3(ctx->opcode);
    case OPC_DIVU_G_2E:
    case OPC_MODU_G_2E:
    case OPC_MULT_G_2E:
    case OPC_MULTU_G_2E:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
        if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
            op2 = MASK_ADDUH_QB(ctx->opcode);
            case OPC_ADDUH_R_QB:
            case OPC_ADDQH_R_PH:
            case OPC_ADDQH_R_W:
            case OPC_SUBUH_R_QB:
            case OPC_SUBQH_R_PH:
            case OPC_SUBQH_R_W:
                gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            case OPC_MULQ_RS_W:
                gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
                MIPS_INVAL("MASK ADDUH.QB");
                generate_exception_end(ctx, EXCP_RI);
        } else if (ctx->insn_flags & INSN_LOONGSON2E) {
            gen_loongson_integer(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_LX(ctx->opcode);
#if defined(TARGET_MIPS64)
            gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK LX");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ABSQ_S_PH_DSP:
        op2 = MASK_ABSQ_S_PH(ctx->opcode);
        case OPC_ABSQ_S_QB:
        case OPC_ABSQ_S_PH:
        case OPC_PRECEQ_W_PHL:
        case OPC_PRECEQ_W_PHR:
        case OPC_PRECEQU_PH_QBL:
        case OPC_PRECEQU_PH_QBR:
        case OPC_PRECEQU_PH_QBLA:
        case OPC_PRECEQU_PH_QBRA:
        case OPC_PRECEU_PH_QBL:
        case OPC_PRECEU_PH_QBR:
        case OPC_PRECEU_PH_QBLA:
        case OPC_PRECEU_PH_QBRA:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            MIPS_INVAL("MASK ABSQ_S.PH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_QB_DSP:
        op2 = MASK_ADDU_QB(ctx->opcode);
        case OPC_ADDQ_S_PH:
        case OPC_ADDU_S_QB:
        case OPC_ADDU_S_PH:
        case OPC_SUBQ_S_PH:
        case OPC_SUBU_S_QB:
        case OPC_SUBU_S_PH:
        case OPC_RADDU_W_QB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEU_S_PH_QBL:
        case OPC_MULEU_S_PH_QBR:
        case OPC_MULQ_RS_PH:
        case OPC_MULEQ_S_W_PHL:
        case OPC_MULEQ_S_W_PHR:
        case OPC_MULQ_S_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_QB_DSP:
        op2 = MASK_CMPU_EQ_QB(ctx->opcode);
        case OPC_PRECR_SRA_PH_W:
        case OPC_PRECR_SRA_R_PH_W:
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        case OPC_EXTR_RS_W:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
    case OPC_DDIV_G_2E:
    case OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E:
    case OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E:
    case OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA:
        case OPC_ABSQ_S_OB:
        case OPC_ABSQ_S_PW:
        case OPC_ABSQ_S_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK ABSQ_S.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_OB_DSP:
        op2 = MASK_ADDU_OB(ctx->opcode);
        case OPC_RADDU_L_OB:
        case OPC_SUBQ_S_PW:
        case OPC_SUBQ_S_QH:
        case OPC_SUBU_S_OB:
        case OPC_SUBU_S_QH:
        case OPC_SUBUH_R_OB:
        case OPC_ADDQ_S_PW:
        case OPC_ADDQ_S_QH:
        case OPC_ADDU_S_OB:
        case OPC_ADDU_S_QH:
        case OPC_ADDUH_R_OB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEQ_S_PW_QHL:
        case OPC_MULEQ_S_PW_QHR:
        case OPC_MULEU_S_QH_OBL:
        case OPC_MULEU_S_QH_OBR:
        case OPC_MULQ_RS_QH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_OB_DSP:
        op2 = MASK_CMPU_EQ_OB(ctx->opcode);
        case OPC_PRECR_SRA_QH_PW:
        case OPC_PRECR_SRA_R_QH_PW:
            /* Return value is rt. */
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_OB_QH:
        case OPC_PRECRQ_OB_QH:
        case OPC_PRECRQ_PW_L:
        case OPC_PRECRQ_QH_PW:
        case OPC_PRECRQ_RS_QH_PW:
        case OPC_PRECRQU_S_OB_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_OB:
        case OPC_CMPU_LT_OB:
        case OPC_CMPU_LE_OB:
        case OPC_CMP_EQ_QH:
        case OPC_CMP_LT_QH:
        case OPC_CMP_LE_QH:
        case OPC_CMP_EQ_PW:
        case OPC_CMP_LT_PW:
        case OPC_CMP_LE_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGDU_EQ_OB:
        case OPC_CMPGDU_LT_OB:
        case OPC_CMPGDU_LE_OB:
        case OPC_CMPGU_EQ_OB:
        case OPC_CMPGU_LT_OB:
        case OPC_CMPGU_LE_OB:
        case OPC_PACKRL_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU_EQ.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAPPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_DEXTR_W_DSP:
        op2 = MASK_DEXTR_W(ctx->opcode);
        case OPC_DEXTR_R_L:
        case OPC_DEXTR_RS_L:
        case OPC_DEXTR_R_W:
        case OPC_DEXTR_RS_W:
        case OPC_DEXTR_S_H:
        case OPC_DEXTRV_R_L:
        case OPC_DEXTRV_RS_L:
        case OPC_DEXTRV_S_H:
        case OPC_DEXTRV_R_W:
        case OPC_DEXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DPAQ_W_QH_DSP:
        op2 = MASK_DPAQ_W_QH(ctx->opcode);
        case OPC_DPAU_H_OBL:
        case OPC_DPAU_H_OBR:
        case OPC_DPSU_H_OBL:
        case OPC_DPSU_H_OBR:
        case OPC_DPAQ_S_W_QH:
        case OPC_DPSQ_S_W_QH:
        case OPC_MULSAQ_S_W_QH:
        case OPC_DPAQ_SA_L_PW:
        case OPC_DPSQ_SA_L_PW:
        case OPC_MULSAQ_S_L_PW:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_MAQ_S_W_QHLL:
        case OPC_MAQ_S_W_QHLR:
        case OPC_MAQ_S_W_QHRL:
        case OPC_MAQ_S_W_QHRR:
        case OPC_MAQ_SA_W_QHLL:
        case OPC_MAQ_SA_W_QHLR:
        case OPC_MAQ_SA_W_QHRL:
        case OPC_MAQ_SA_W_QHRR:
        case OPC_MAQ_S_L_PWL:
        case OPC_MAQ_S_L_PWR:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAQ.W.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DINSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DINSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_OB_DSP
:
23058 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
    default:            /* Invalid */
        MIPS_INVAL("special3_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = sextract32(ctx->opcode, 7, 9);

    op1 = MASK_SPECIAL3(ctx->opcode);
23083 * EVA loads and stores overlap Loongson 2E instructions decoded by
23084 * decode_opc_special3_legacy(), so be careful to allow their decoding when
23091 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23099 check_cp0_enabled(ctx
);
23100 gen_ld(ctx
, op1
, rt
, rs
, imm
);
23104 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
23109 check_cp0_enabled(ctx
);
23110 gen_st(ctx
, op1
, rt
, rs
, imm
);
23113 check_cp0_enabled(ctx
);
23114 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
23117 check_cp0_enabled(ctx
);
23118 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
23119 gen_cache_operation(ctx
, rt
, rs
, imm
);
23121 /* Treat as NOP. */
23124 check_cp0_enabled(ctx
);
23125 /* Treat as NOP. */
23133 check_insn(ctx
, ISA_MIPS32R2
);
23134 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
23137 op2
= MASK_BSHFL(ctx
->opcode
);
23140 case OPC_ALIGN_END
:
23142 check_insn(ctx
, ISA_MIPS32R6
);
23143 decode_opc_special3_r6(env
, ctx
);
23146 check_insn(ctx
, ISA_MIPS32R2
);
23147 gen_bshfl(ctx
, op2
, rt
, rd
);
23151 #if defined(TARGET_MIPS64)
23158 check_insn(ctx
, ISA_MIPS64R2
);
23159 check_mips_64(ctx
);
23160 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
23163 op2
= MASK_DBSHFL(ctx
->opcode
);
23166 case OPC_DALIGN_END
:
23168 check_insn(ctx
, ISA_MIPS32R6
);
23169 decode_opc_special3_r6(env
, ctx
);
23172 check_insn(ctx
, ISA_MIPS64R2
);
23173 check_mips_64(ctx
);
23174 op2
= MASK_DBSHFL(ctx
->opcode
);
23175 gen_bshfl(ctx
, op2
, rt
, rd
);
23181 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
23186 TCGv t0
= tcg_temp_new();
23187 TCGv t1
= tcg_temp_new();
23189 gen_load_gpr(t0
, rt
);
23190 gen_load_gpr(t1
, rs
);
23191 gen_helper_fork(t0
, t1
);
23199 TCGv t0
= tcg_temp_new();
23201 gen_load_gpr(t0
, rs
);
23202 gen_helper_yield(t0
, cpu_env
, t0
);
23203 gen_store_gpr(t0
, rd
);
23208 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
23209 decode_opc_special3_r6(env
, ctx
);
23211 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}

static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
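/*
 * MSA compare-and-branch instructions (BZ.V, BNZ.V, BZ.df, BNZ.df) compute
 * the branch condition into the global 'bcond' TCG variable and flag the
 * instruction as a branch with a 32-bit delay slot; the actual jump is
 * emitted later by gen_branch().
 */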
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->base.pc_next + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
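/*
 * MSA I8 format: the 8-bit immediate sits in bits 23:16, ws in bits 15:11
 * and wd in bits 10:6; bits 25:24 select the operation group.
 */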
23323 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
23325 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
23326 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
23327 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23328 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23330 TCGv_i32 twd
= tcg_const_i32(wd
);
23331 TCGv_i32 tws
= tcg_const_i32(ws
);
23332 TCGv_i32 ti8
= tcg_const_i32(i8
);
23334 switch (MASK_MSA_I8(ctx
->opcode
)) {
23336 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
23339 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
23342 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
23345 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
23348 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
23351 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
23354 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
23360 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
23361 if (df
== DF_DOUBLE
) {
23362 generate_exception_end(ctx
, EXCP_RI
);
23364 TCGv_i32 tdf
= tcg_const_i32(df
);
23365 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
23366 tcg_temp_free_i32(tdf
);
23371 MIPS_INVAL("MSA instruction");
23372 generate_exception_end(ctx
, EXCP_RI
);
23376 tcg_temp_free_i32(twd
);
23377 tcg_temp_free_i32(tws
);
23378 tcg_temp_free_i32(ti8
);
23381 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
23383 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23384 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
23385 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
23386 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
23387 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23388 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23390 TCGv_i32 tdf
= tcg_const_i32(df
);
23391 TCGv_i32 twd
= tcg_const_i32(wd
);
23392 TCGv_i32 tws
= tcg_const_i32(ws
);
23393 TCGv_i32 timm
= tcg_temp_new_i32();
23394 tcg_gen_movi_i32(timm
, u5
);
23396 switch (MASK_MSA_I5(ctx
->opcode
)) {
23398 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23401 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23403 case OPC_MAXI_S_df
:
23404 tcg_gen_movi_i32(timm
, s5
);
23405 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23407 case OPC_MAXI_U_df
:
23408 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23410 case OPC_MINI_S_df
:
23411 tcg_gen_movi_i32(timm
, s5
);
23412 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23414 case OPC_MINI_U_df
:
23415 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23418 tcg_gen_movi_i32(timm
, s5
);
23419 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
23421 case OPC_CLTI_S_df
:
23422 tcg_gen_movi_i32(timm
, s5
);
23423 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23425 case OPC_CLTI_U_df
:
23426 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23428 case OPC_CLEI_S_df
:
23429 tcg_gen_movi_i32(timm
, s5
);
23430 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
23432 case OPC_CLEI_U_df
:
23433 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
23437 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
23438 tcg_gen_movi_i32(timm
, s10
);
23439 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
23443 MIPS_INVAL("MSA instruction");
23444 generate_exception_end(ctx
, EXCP_RI
);
23448 tcg_temp_free_i32(tdf
);
23449 tcg_temp_free_i32(twd
);
23450 tcg_temp_free_i32(tws
);
23451 tcg_temp_free_i32(timm
);
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf;
    TCGv_i32 tm;
    TCGv_i32 twd;
    TCGv_i32 tws;

    if ((dfm & 0x40) == 0x00) {
        m = dfm & 0x3f;
        df = DF_DOUBLE;
    } else if ((dfm & 0x60) == 0x40) {
        m = dfm & 0x1f;
        df = DF_WORD;
    } else if ((dfm & 0x70) == 0x60) {
        m = dfm & 0x0f;
        df = DF_HALF;
    } else if ((dfm & 0x78) == 0x70) {
        m = dfm & 0x7;
        df = DF_BYTE;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    tdf = tcg_const_i32(df);
    tm = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
    case OPC_SLLI_df:
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRAI_df:
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLI_df:
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BCLRI_df:
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BSETI_df:
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BNEGI_df:
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_S_df:
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_U_df:
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRARI_df:
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLRI_df:
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
}
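/*
 * MSA 3R format: df is taken from bits 22:21 and the three vector register
 * operands wt, ws and wd from bits 20:16, 15:11 and 10:6; each opcode maps
 * directly onto one out-of-line helper.
 */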
23538 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
23540 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23541 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
23542 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
23543 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23544 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23546 TCGv_i32 tdf
= tcg_const_i32(df
);
23547 TCGv_i32 twd
= tcg_const_i32(wd
);
23548 TCGv_i32 tws
= tcg_const_i32(ws
);
23549 TCGv_i32 twt
= tcg_const_i32(wt
);
23551 switch (MASK_MSA_3R(ctx
->opcode
)) {
23553 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
23556 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23559 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23562 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23564 case OPC_SUBS_S_df
:
23565 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23568 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23571 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
23574 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
23577 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
23580 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23582 case OPC_ADDS_A_df
:
23583 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23585 case OPC_SUBS_U_df
:
23586 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23589 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23592 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
23595 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
23598 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23601 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23604 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23606 case OPC_ADDS_S_df
:
23607 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23609 case OPC_SUBSUS_U_df
:
23610 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23613 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23616 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
23619 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23622 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23625 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23628 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23630 case OPC_ADDS_U_df
:
23631 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23633 case OPC_SUBSUU_S_df
:
23634 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23637 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
23640 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
23643 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23646 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23649 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23651 case OPC_ASUB_S_df
:
23652 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23655 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23658 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23661 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
23664 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23667 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23670 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23672 case OPC_ASUB_U_df
:
23673 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23676 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23679 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23682 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
23685 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23687 case OPC_AVER_S_df
:
23688 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23691 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23694 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
23697 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
23700 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
23702 case OPC_AVER_U_df
:
23703 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23706 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23709 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
23712 case OPC_DOTP_S_df
:
23713 case OPC_DOTP_U_df
:
23714 case OPC_DPADD_S_df
:
23715 case OPC_DPADD_U_df
:
23716 case OPC_DPSUB_S_df
:
23717 case OPC_HADD_S_df
:
23718 case OPC_DPSUB_U_df
:
23719 case OPC_HADD_U_df
:
23720 case OPC_HSUB_S_df
:
23721 case OPC_HSUB_U_df
:
23722 if (df
== DF_BYTE
) {
23723 generate_exception_end(ctx
, EXCP_RI
);
23726 switch (MASK_MSA_3R(ctx
->opcode
)) {
23727 case OPC_DOTP_S_df
:
23728 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23730 case OPC_DOTP_U_df
:
23731 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23733 case OPC_DPADD_S_df
:
23734 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23736 case OPC_DPADD_U_df
:
23737 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23739 case OPC_DPSUB_S_df
:
23740 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23742 case OPC_HADD_S_df
:
23743 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23745 case OPC_DPSUB_U_df
:
23746 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23748 case OPC_HADD_U_df
:
23749 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23751 case OPC_HSUB_S_df
:
23752 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
23754 case OPC_HSUB_U_df
:
23755 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
23760 MIPS_INVAL("MSA instruction");
23761 generate_exception_end(ctx
, EXCP_RI
);
23764 tcg_temp_free_i32(twd
);
23765 tcg_temp_free_i32(tws
);
23766 tcg_temp_free_i32(twt
);
23767 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
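/*
 * ELM operations with a data format field: COPY_S, COPY_U and INSERT with
 * the double format are only valid on MIPS64, so they raise a Reserved
 * Instruction exception on 32-bit targets.
 */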
23802 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
23805 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
23806 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23807 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23809 TCGv_i32 tws
= tcg_const_i32(ws
);
23810 TCGv_i32 twd
= tcg_const_i32(wd
);
23811 TCGv_i32 tn
= tcg_const_i32(n
);
23812 TCGv_i32 tdf
= tcg_const_i32(df
);
23814 switch (MASK_MSA_ELM(ctx
->opcode
)) {
23816 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
23818 case OPC_SPLATI_df
:
23819 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
23822 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
23824 case OPC_COPY_S_df
:
23825 case OPC_COPY_U_df
:
23826 case OPC_INSERT_df
:
23827 #if !defined(TARGET_MIPS64)
23828 /* Double format valid only for MIPS64 */
23829 if (df
== DF_DOUBLE
) {
23830 generate_exception_end(ctx
, EXCP_RI
);
23834 switch (MASK_MSA_ELM(ctx
->opcode
)) {
23835 case OPC_COPY_S_df
:
23836 if (likely(wd
!= 0)) {
23837 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
23840 case OPC_COPY_U_df
:
23841 if (likely(wd
!= 0)) {
23842 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
23845 case OPC_INSERT_df
:
23846 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
23851 MIPS_INVAL("MSA instruction");
23852 generate_exception_end(ctx
, EXCP_RI
);
23854 tcg_temp_free_i32(twd
);
23855 tcg_temp_free_i32(tws
);
23856 tcg_temp_free_i32(tn
);
23857 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
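/*
 * MSA 3RF format: bit 21 selects word or double data format.  The
 * floating-point helpers receive tdf as df + 2 (word/double), while the
 * fixed-point Q-format multiplies pass df + 1 (half/word).
 */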
23889 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
23891 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
23892 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
23893 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
23894 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
23895 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
23897 TCGv_i32 twd
= tcg_const_i32(wd
);
23898 TCGv_i32 tws
= tcg_const_i32(ws
);
23899 TCGv_i32 twt
= tcg_const_i32(wt
);
23900 TCGv_i32 tdf
= tcg_temp_new_i32();
23902 /* adjust df value for floating-point instruction */
23903 tcg_gen_movi_i32(tdf
, df
+ 2);
23905 switch (MASK_MSA_3RF(ctx
->opcode
)) {
23907 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
23910 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
23913 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
23916 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
23919 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
23922 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23925 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
23928 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
23931 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23934 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
23937 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
23940 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
23943 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
23946 tcg_gen_movi_i32(tdf
, df
+ 1);
23947 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
23950 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
23953 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
23955 case OPC_MADD_Q_df
:
23956 tcg_gen_movi_i32(tdf
, df
+ 1);
23957 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
23960 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
23962 case OPC_MSUB_Q_df
:
23963 tcg_gen_movi_i32(tdf
, df
+ 1);
23964 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
23967 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
23970 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
23973 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
23976 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
23979 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
23982 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
23985 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23988 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23991 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
23994 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
23997 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
24000 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
24003 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
24005 case OPC_MULR_Q_df
:
24006 tcg_gen_movi_i32(tdf
, df
+ 1);
24007 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24010 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
24012 case OPC_FMIN_A_df
:
24013 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24015 case OPC_MADDR_Q_df
:
24016 tcg_gen_movi_i32(tdf
, df
+ 1);
24017 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24020 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
24023 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
24025 case OPC_MSUBR_Q_df
:
24026 tcg_gen_movi_i32(tdf
, df
+ 1);
24027 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
24030 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
24032 case OPC_FMAX_A_df
:
24033 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
24036 MIPS_INVAL("MSA instruction");
24037 generate_exception_end(ctx
, EXCP_RI
);
24041 tcg_temp_free_i32(twd
);
24042 tcg_temp_free_i32(tws
);
24043 tcg_temp_free_i32(twt
);
24044 tcg_temp_free_i32(tdf
);
static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                        (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
    case OPC_FILL_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        break;
    case OPC_PCNT_df:
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLOC_df:
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLZC_df:
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
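/*
 * MSA 2RF format: two-operand floating-point conversions and
 * classifications; bit 16 selects the data format and, as for 3RF, the
 * helpers receive df + 2.
 */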
24092 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
24094 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
24095 (op & (0xf << 17)))
24096 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
24097 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
24098 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
24099 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
24100 TCGv_i32 twd
= tcg_const_i32(wd
);
24101 TCGv_i32 tws
= tcg_const_i32(ws
);
24102 TCGv_i32 twt
= tcg_const_i32(wt
);
24103 /* adjust df value for floating-point instruction */
24104 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
24106 switch (MASK_MSA_2RF(ctx
->opcode
)) {
24107 case OPC_FCLASS_df
:
24108 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
24110 case OPC_FTRUNC_S_df
:
24111 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
24113 case OPC_FTRUNC_U_df
:
24114 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
24117 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
24119 case OPC_FRSQRT_df
:
24120 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
24123 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
24126 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
24129 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
24131 case OPC_FEXUPL_df
:
24132 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
24134 case OPC_FEXUPR_df
:
24135 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
24138 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
24141 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
24143 case OPC_FTINT_S_df
:
24144 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
24146 case OPC_FTINT_U_df
:
24147 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
24149 case OPC_FFINT_S_df
:
24150 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
24152 case OPC_FFINT_U_df
:
24153 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
24157 tcg_temp_free_i32(twd
);
24158 tcg_temp_free_i32(tws
);
24159 tcg_temp_free_i32(twt
);
24160 tcg_temp_free_i32(tdf
);
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}

static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
{
    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
    case OPC_OR_V:
    case OPC_NOR_V:
    case OPC_XOR_V:
    case OPC_BMNZ_V:
    case OPC_BMZ_V:
    case OPC_BSEL_V:
        gen_msa_vec_v(env, ctx);
        break;
    case OPC_MSA_2R:
        gen_msa_2r(env, ctx);
        break;
    case OPC_MSA_2RF:
        gen_msa_2rf(env, ctx);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
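/*
 * Top-level MSA decoder: dispatch on the minor opcode to the per-format
 * generators above, with vector loads and stores handled inline through
 * the MSA load/store helpers.
 */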
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
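/*
 * decode_opc() handles the 32-bit MIPS encodings: it checks word alignment
 * of the fetched PC, emits the not-taken path for branch-likely
 * instructions, and then dispatches on the major opcode field.
 */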
24330 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
24333 int rs
, rt
, rd
, sa
;
24337 /* make sure instructions are on a word boundary */
24338 if (ctx
->base
.pc_next
& 0x3) {
24339 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
24340 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
24344 /* Handle blikely not taken case */
24345 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
24346 TCGLabel
*l1
= gen_new_label();
24348 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
24349 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
24350 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
24354 op
= MASK_OP_MAJOR(ctx
->opcode
);
24355 rs
= (ctx
->opcode
>> 21) & 0x1f;
24356 rt
= (ctx
->opcode
>> 16) & 0x1f;
24357 rd
= (ctx
->opcode
>> 11) & 0x1f;
24358 sa
= (ctx
->opcode
>> 6) & 0x1f;
24359 imm
= (int16_t)ctx
->opcode
;
24362 decode_opc_special(env
, ctx
);
24365 decode_opc_special2_legacy(env
, ctx
);
24368 decode_opc_special3(env
, ctx
);
24371 op1
= MASK_REGIMM(ctx
->opcode
);
24373 case OPC_BLTZL
: /* REGIMM branches */
24377 check_insn(ctx
, ISA_MIPS2
);
24378 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24382 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
24386 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24388 /* OPC_NAL, OPC_BAL */
24389 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
24391 generate_exception_end(ctx
, EXCP_RI
);
24394 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
24397 case OPC_TGEI
: /* REGIMM traps */
24404 check_insn(ctx
, ISA_MIPS2
);
24405 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24406 gen_trap(ctx
, op1
, rs
, -1, imm
);
24409 check_insn(ctx
, ISA_MIPS32R6
);
24410 generate_exception_end(ctx
, EXCP_RI
);
24413 check_insn(ctx
, ISA_MIPS32R2
);
24414 /* Break the TB to be able to sync copied instructions
24416 ctx
->base
.is_jmp
= DISAS_STOP
;
24418 case OPC_BPOSGE32
: /* MIPS DSP branch */
24419 #if defined(TARGET_MIPS64)
24423 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
24425 #if defined(TARGET_MIPS64)
24427 check_insn(ctx
, ISA_MIPS32R6
);
24428 check_mips_64(ctx
);
24430 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
24434 check_insn(ctx
, ISA_MIPS32R6
);
24435 check_mips_64(ctx
);
24437 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
24441 default: /* Invalid */
24442 MIPS_INVAL("regimm");
24443 generate_exception_end(ctx
, EXCP_RI
);
24448 check_cp0_enabled(ctx
);
24449 op1
= MASK_CP0(ctx
->opcode
);
24457 #if defined(TARGET_MIPS64)
24461 #ifndef CONFIG_USER_ONLY
24462 gen_cp0(env
, ctx
, op1
, rt
, rd
);
24463 #endif /* !CONFIG_USER_ONLY */
24481 #ifndef CONFIG_USER_ONLY
24482 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
24483 #endif /* !CONFIG_USER_ONLY */
24486 #ifndef CONFIG_USER_ONLY
24489 TCGv t0
= tcg_temp_new();
24491 op2
= MASK_MFMC0(ctx
->opcode
);
24495 gen_helper_dmt(t0
);
24496 gen_store_gpr(t0
, rt
);
24500 gen_helper_emt(t0
);
24501 gen_store_gpr(t0
, rt
);
24505 gen_helper_dvpe(t0
, cpu_env
);
24506 gen_store_gpr(t0
, rt
);
24510 gen_helper_evpe(t0
, cpu_env
);
24511 gen_store_gpr(t0
, rt
);
24514 check_insn(ctx
, ISA_MIPS32R6
);
24516 gen_helper_dvp(t0
, cpu_env
);
24517 gen_store_gpr(t0
, rt
);
24521 check_insn(ctx
, ISA_MIPS32R6
);
24523 gen_helper_evp(t0
, cpu_env
);
24524 gen_store_gpr(t0
, rt
);
24528 check_insn(ctx
, ISA_MIPS32R2
);
24529 save_cpu_state(ctx
, 1);
24530 gen_helper_di(t0
, cpu_env
);
24531 gen_store_gpr(t0
, rt
);
24532 /* Stop translation as we may have switched
24533 the execution mode. */
24534 ctx
->base
.is_jmp
= DISAS_STOP
;
24537 check_insn(ctx
, ISA_MIPS32R2
);
24538 save_cpu_state(ctx
, 1);
24539 gen_helper_ei(t0
, cpu_env
);
24540 gen_store_gpr(t0
, rt
);
24541 /* DISAS_STOP isn't sufficient, we need to ensure we break
24542 out of translated code to check for pending interrupts */
24543 gen_save_pc(ctx
->base
.pc_next
+ 4);
24544 ctx
->base
.is_jmp
= DISAS_EXIT
;
24546 default: /* Invalid */
24547 MIPS_INVAL("mfmc0");
24548 generate_exception_end(ctx
, EXCP_RI
);
24553 #endif /* !CONFIG_USER_ONLY */
24556 check_insn(ctx
, ISA_MIPS32R2
);
24557 gen_load_srsgpr(rt
, rd
);
24560 check_insn(ctx
, ISA_MIPS32R2
);
24561 gen_store_srsgpr(rt
, rd
);
24565 generate_exception_end(ctx
, EXCP_RI
);
24569 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
24570 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24571 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
24572 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24575 /* Arithmetic with immediate opcode */
24576 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24580 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24582 case OPC_SLTI
: /* Set on less than with immediate opcode */
24584 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
24586 case OPC_ANDI
: /* Arithmetic with immediate opcode */
24587 case OPC_LUI
: /* OPC_AUI */
24590 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
24592 case OPC_J
: /* Jump */
24594 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
24595 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
24598 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
24599 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24601 generate_exception_end(ctx
, EXCP_RI
);
24604 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
24605 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24608 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24611 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
24612 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24614 generate_exception_end(ctx
, EXCP_RI
);
24617 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
24618 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24621 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24624 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
24627 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24629 check_insn(ctx
, ISA_MIPS32R6
);
24630 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
24631 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24634 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
24637 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24639 check_insn(ctx
, ISA_MIPS32R6
);
24640 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
24641 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24646 check_insn(ctx
, ISA_MIPS2
);
24647 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24651 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
24653 case OPC_LL
: /* Load and stores */
24654 check_insn(ctx
, ISA_MIPS2
);
24658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24666 gen_ld(ctx
, op
, rt
, rs
, imm
);
24670 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24675 gen_st(ctx
, op
, rt
, rs
, imm
);
24678 check_insn(ctx
, ISA_MIPS2
);
24679 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24680 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
24683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24684 check_cp0_enabled(ctx
);
24685 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
24686 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
24687 gen_cache_operation(ctx
, rt
, rs
, imm
);
24689 /* Treat as NOP. */
24692 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24693 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24694 /* Treat as NOP. */
24697 /* Floating point (COP1). */
24702 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
24706 op1
= MASK_CP1(ctx
->opcode
);
24711 check_cp1_enabled(ctx
);
24712 check_insn(ctx
, ISA_MIPS32R2
);
24718 check_cp1_enabled(ctx
);
24719 gen_cp1(ctx
, op1
, rt
, rd
);
24721 #if defined(TARGET_MIPS64)
24724 check_cp1_enabled(ctx
);
24725 check_insn(ctx
, ISA_MIPS3
);
24726 check_mips_64(ctx
);
24727 gen_cp1(ctx
, op1
, rt
, rd
);
24730 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
24731 check_cp1_enabled(ctx
);
24732 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24734 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
24739 check_insn(ctx
, ASE_MIPS3D
);
24740 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
24741 (rt
>> 2) & 0x7, imm
<< 2);
24745 check_cp1_enabled(ctx
);
24746 check_insn(ctx
, ISA_MIPS32R6
);
24747 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
24751 check_cp1_enabled(ctx
);
24752 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24754 check_insn(ctx
, ASE_MIPS3D
);
24757 check_cp1_enabled(ctx
);
24758 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24759 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
24760 (rt
>> 2) & 0x7, imm
<< 2);
24767 check_cp1_enabled(ctx
);
24768 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
24774 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
24775 check_cp1_enabled(ctx
);
24776 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24778 case R6_OPC_CMP_AF_S
:
24779 case R6_OPC_CMP_UN_S
:
24780 case R6_OPC_CMP_EQ_S
:
24781 case R6_OPC_CMP_UEQ_S
:
24782 case R6_OPC_CMP_LT_S
:
24783 case R6_OPC_CMP_ULT_S
:
24784 case R6_OPC_CMP_LE_S
:
24785 case R6_OPC_CMP_ULE_S
:
24786 case R6_OPC_CMP_SAF_S
:
24787 case R6_OPC_CMP_SUN_S
:
24788 case R6_OPC_CMP_SEQ_S
:
24789 case R6_OPC_CMP_SEUQ_S
:
24790 case R6_OPC_CMP_SLT_S
:
24791 case R6_OPC_CMP_SULT_S
:
24792 case R6_OPC_CMP_SLE_S
:
24793 case R6_OPC_CMP_SULE_S
:
24794 case R6_OPC_CMP_OR_S
:
24795 case R6_OPC_CMP_UNE_S
:
24796 case R6_OPC_CMP_NE_S
:
24797 case R6_OPC_CMP_SOR_S
:
24798 case R6_OPC_CMP_SUNE_S
:
24799 case R6_OPC_CMP_SNE_S
:
24800 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
24802 case R6_OPC_CMP_AF_D
:
24803 case R6_OPC_CMP_UN_D
:
24804 case R6_OPC_CMP_EQ_D
:
24805 case R6_OPC_CMP_UEQ_D
:
24806 case R6_OPC_CMP_LT_D
:
24807 case R6_OPC_CMP_ULT_D
:
24808 case R6_OPC_CMP_LE_D
:
24809 case R6_OPC_CMP_ULE_D
:
24810 case R6_OPC_CMP_SAF_D
:
24811 case R6_OPC_CMP_SUN_D
:
24812 case R6_OPC_CMP_SEQ_D
:
24813 case R6_OPC_CMP_SEUQ_D
:
24814 case R6_OPC_CMP_SLT_D
:
24815 case R6_OPC_CMP_SULT_D
:
24816 case R6_OPC_CMP_SLE_D
:
24817 case R6_OPC_CMP_SULE_D
:
24818 case R6_OPC_CMP_OR_D
:
24819 case R6_OPC_CMP_UNE_D
:
24820 case R6_OPC_CMP_NE_D
:
24821 case R6_OPC_CMP_SOR_D
:
24822 case R6_OPC_CMP_SUNE_D
:
24823 case R6_OPC_CMP_SNE_D
:
24824 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
24827 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
24828 rt
, rd
, sa
, (imm
>> 8) & 0x7);
24833 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
24848 check_insn(ctx
, ASE_MSA
);
24849 gen_msa_branch(env
, ctx
, op1
);
24853 generate_exception_end(ctx
, EXCP_RI
);
24858 /* Compact branches [R6] and COP2 [non-R6] */
24859 case OPC_BC
: /* OPC_LWC2 */
24860 case OPC_BALC
: /* OPC_SWC2 */
24861 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24862 /* OPC_BC, OPC_BALC */
24863 gen_compute_compact_branch(ctx
, op
, 0, 0,
24864 sextract32(ctx
->opcode
<< 2, 0, 28));
24866 /* OPC_LWC2, OPC_SWC2 */
24867 /* COP2: Not implemented. */
24868 generate_exception_err(ctx
, EXCP_CpU
, 2);
24871 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
24872 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
24873 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24875 /* OPC_BEQZC, OPC_BNEZC */
24876 gen_compute_compact_branch(ctx
, op
, rs
, 0,
24877 sextract32(ctx
->opcode
<< 2, 0, 23));
24879 /* OPC_JIC, OPC_JIALC */
24880 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
24883 /* OPC_LWC2, OPC_SWC2 */
24884 /* COP2: Not implemented. */
24885 generate_exception_err(ctx
, EXCP_CpU
, 2);
24889 check_insn(ctx
, INSN_LOONGSON2F
);
24890 /* Note that these instructions use different fields. */
24891 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
24895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24896 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
24897 check_cp1_enabled(ctx
);
24898 op1
= MASK_CP3(ctx
->opcode
);
24902 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
24908 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
24909 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
24912 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
24913 /* Treat as NOP. */
24916 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
24930 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
24931 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
24935 generate_exception_end(ctx
, EXCP_RI
);
24939 generate_exception_err(ctx
, EXCP_CpU
, 1);
24943 #if defined(TARGET_MIPS64)
24944 /* MIPS64 opcodes */
24948 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24952 check_insn(ctx
, ISA_MIPS3
);
24953 check_mips_64(ctx
);
24954 gen_ld(ctx
, op
, rt
, rs
, imm
);
24958 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24961 check_insn(ctx
, ISA_MIPS3
);
24962 check_mips_64(ctx
);
24963 gen_st(ctx
, op
, rt
, rs
, imm
);
24966 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
24967 check_insn(ctx
, ISA_MIPS3
);
24968 check_mips_64(ctx
);
24969 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
24971 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
24972 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24973 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
24974 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24977 check_insn(ctx
, ISA_MIPS3
);
24978 check_mips_64(ctx
);
24979 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24983 check_insn(ctx
, ISA_MIPS3
);
24984 check_mips_64(ctx
);
24985 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
24988 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
24989 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24990 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
24992 MIPS_INVAL("major opcode");
24993 generate_exception_end(ctx
, EXCP_RI
);
24997 case OPC_DAUI
: /* OPC_JALX */
24998 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24999 #if defined(TARGET_MIPS64)
25001 check_mips_64(ctx
);
25003 generate_exception(ctx
, EXCP_RI
);
25004 } else if (rt
!= 0) {
25005 TCGv t0
= tcg_temp_new();
25006 gen_load_gpr(t0
, rs
);
25007 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
25011 generate_exception_end(ctx
, EXCP_RI
);
25012 MIPS_INVAL("major opcode");
25016 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
25017 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
25018 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
25021 case OPC_MSA
: /* OPC_MDMX */
25022 /* MDMX: Not implemented. */
25026 check_insn(ctx
, ISA_MIPS32R6
);
25027 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
25029 default: /* Invalid */
25030 MIPS_INVAL("major opcode");
25031 generate_exception_end(ctx
, EXCP_RI
);
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    ctx->btarget = 0;
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}

static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}

static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}
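/*
 * One iteration of the translator loop: fetch and decode a single
 * instruction using the decoder selected by the current ISA mode
 * (nanoMIPS, standard 32-bit, microMIPS or MIPS16), then emit any pending
 * branch once its delay or forbidden slot has been translated.
 */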
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /* force to generate branch as there is neither delay nor
               forbidden slot */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /* Force to generate branch as microMIPS R6 doesn't restrict
               branches in the forbidden slot. */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /* Execute a branch and its delay slot as a single instruction.
       This is what GDB expects and is consistent with what the
       hardware does (e.g. if a delay slot instruction faults, the
       reported PC is the PC of the branch). */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
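/*
 * At the end of a translation block the pending state is flushed according
 * to base.is_jmp: DISAS_STOP looks up the next TB through
 * tcg_gen_lookup_and_goto_ptr(), DISAS_NEXT and DISAS_TOO_MANY chain
 * directly to the following block, and DISAS_EXIT returns to the main loop.
 */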
static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx->base.is_jmp) {
        case DISAS_STOP:
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
            break;
        case DISAS_NEXT:
        case DISAS_TOO_MANY:
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            break;
        case DISAS_EXIT:
            tcg_gen_exit_tb(NULL, 0);
            break;
        case DISAS_NORETURN:
            break;
        default:
            g_assert_not_reached();
        }
    }
}

static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb);
}
25223 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
25227 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
25229 #define printfpr(fp) \
25232 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
25233 " fd:%13g fs:%13g psu: %13g\n", \
25234 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
25235 (double)(fp)->fd, \
25236 (double)(fp)->fs[FP_ENDIAN_IDX], \
25237 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
25240 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
25241 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
25242 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
25243 " fd:%13g fs:%13g psu:%13g\n", \
25244 tmp.w[FP_ENDIAN_IDX], tmp.d, \
25246 (double)tmp.fs[FP_ENDIAN_IDX], \
25247 (double)tmp.fs[!FP_ENDIAN_IDX]); \
25252 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
25253 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
25254 get_float_exception_flags(&env
->active_fpu
.fp_status
));
25255 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
25256 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
25257 printfpr(&env
->active_fpu
.fpr
[i
]);
25263 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
25266 MIPSCPU
*cpu
= MIPS_CPU(cs
);
25267 CPUMIPSState
*env
= &cpu
->env
;
25270 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
25271 " LO=0x" TARGET_FMT_lx
" ds %04x "
25272 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
25273 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
25274 env
->hflags
, env
->btarget
, env
->bcond
);
25275 for (i
= 0; i
< 32; i
++) {
25277 cpu_fprintf(f
, "GPR%02d:", i
);
25278 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
25280 cpu_fprintf(f
, "\n");
25283 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
25284 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
25285 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
25287 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
25288 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
25289 env
->CP0_Config2
, env
->CP0_Config3
);
25290 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
25291 env
->CP0_Config4
, env
->CP0_Config5
);
25292 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
25293 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
void mips_tcg_init(void)
{
    int i;

    cpu_gpr[0] = NULL;
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
}

#include "translate_init.inc.c"
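
/*
 * Realize-time initialisation: set the default reset/exception vector base
 * and initialise the MMU (system emulation only), FPU and MVP state from
 * the selected cpu_model.
 */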
void cpu_mips_realize_env(CPUMIPSState *env)
{
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}
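
/*
 * Query helpers for board code.  They look up the CPU class by name, so
 * they can be used before any CPU object has been created.  Illustrative
 * use only (the caller and the ISA flag below are just an example):
 *
 *     if (!cpu_supports_isa(machine->cpu_type, ISA_MIPS32R6)) {
 *         error_report("this machine requires a MIPS32R6 CPU");
 *     }
 */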
bool cpu_supports_cps_smp(const char *cpu_type)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}

bool cpu_supports_isa(const char *cpu_type, unsigned int isa)
{
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->insn_flags & isa) != 0;
}

void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
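
/*
 * Architectural reset.  The identification and configuration registers are
 * first copied from the cpu_model definition; user-mode or system-mode
 * specific reset defaults are then applied on top.
 */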
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

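    /*
     * Everything above comes straight from the CPU definition.  Below, a
     * user-mode-only build just needs a usable Status value and RDHWR
     * access, while system emulation programs the full privileged reset
     * state (ErrorEPC, EBase, TLB, watchpoints, MT and segmentation
     * control).
     */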
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode.  */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= (int32_t)0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active.  */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled.  */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted.  */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active.  */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 =   (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
    env->CP0_SegCtl1 =   (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (2 << CP0SC_C);
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 =   (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if ((env->CP0_Config3 & (1 << CP0C3_ISA)) &&
        (env->CP0_Config3 & (1 << (CP0C3_ISA + 1)))) {
        /*  microMIPS on reset when Config3.ISA is 3 */
        env->hflags |= MIPS_HFLAG_M16;
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
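
/*
 * Restore CPU state from the values recorded at translation time:
 * data[0] holds the PC, data[1] the branch-related hflags and data[2]
 * the branch target (only meaningful when one of the branch flags is set).
 */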
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}