/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)       (op & (0x3F << 26))
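/*
 * Illustration: a 32-bit MIPS instruction word is laid out as
 * op(31..26) rs(25..21) rt(20..16) rd(15..11) sa(10..6) function(5..0),
 * so MASK_OP_MAJOR() keeps only the six major-opcode bits.  For example,
 * MASK_OP_MAJOR(0x8C820004) == (0x23 << 26) == OPC_LW, since that word
 * encodes "lw v0, 4(a0)".
 */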
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)        MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))
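/*
 * Note: the pre-R6 MULT/MULTU/DIV/DIVU encodings require zero in bits
 * 10..6, while the R6 two-register forms reuse the same opcode/function
 * fields and place a non-zero selector there (2 for MUL/DIV, 3 for
 * MUH/MOD), which is why MASK_R6_MULDIV() folds bits 10..0 into the
 * value being compared.
 */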
enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)     MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)         MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,

    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    OPC_LWLE = 0x19 | OPC_SPECIAL3,
    OPC_LWRE = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE = 0x1B | OPC_SPECIAL3,
    OPC_SBE = 0x1C | OPC_SPECIAL3,
    OPC_SHE = 0x1D | OPC_SPECIAL3,
    OPC_SCE = 0x1E | OPC_SPECIAL3,
    OPC_SWE = 0x1F | OPC_SPECIAL3,
    OPC_SWLE = 0x21 | OPC_SPECIAL3,
    OPC_SWRE = 0x22 | OPC_SPECIAL3,
    OPC_PREFE = 0x23 | OPC_SPECIAL3,
    OPC_LBUE = 0x28 | OPC_SPECIAL3,
    OPC_LHUE = 0x29 | OPC_SPECIAL3,
    OPC_LBE = 0x2C | OPC_SPECIAL3,
    OPC_LHE = 0x2D | OPC_SPECIAL3,
    OPC_LLE = 0x2E | OPC_SPECIAL3,
    OPC_LWE = 0x2F | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)          MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)         MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)            MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)          MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)             MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)            MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,          /* single fp */
    FMT_D = 17,          /* double fp */
    FMT_E = 18,          /* extended fp */
    FMT_Q = 19,          /* quad fp */
    FMT_W = 20,          /* 32-bit fixed */
    FMT_L = 21,          /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)       MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)            MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)            MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)            MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op)      (MASK_OP_MAJOR(op) | (op & 0x3F))
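/*
 * MSA shares the MDMX major opcode (OPC_MSA == OPC_MDMX above); the
 * values below first separate the MSA instruction groups by the minor
 * opcode in bits 5..0, and then by further fields inside each group.
 */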
enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
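/*
 * These TCG globals mirror the guest-visible MIPS state (GPRs, PC, the
 * HI/LO accumulator pairs, DSPControl, FPU control registers and the
 * FPU/MSA data registers) plus translator-internal values (hflags,
 * branch target and branch condition) used while emitting code.
 */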
#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
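/*
 * Sketch of how these wrappers are meant to be used (illustrative call,
 * assuming the usual MIPS exception helper is available): something like
 *
 *     gen_helper_0e0i(raise_exception, EXCP_SYSCALL);
 *
 * boxes the integer constant into a temporary TCGv_i32, invokes
 * gen_helper_raise_exception(cpu_env, tmp), and frees the temporary, so
 * callers can pass plain immediates to helpers taking TCGv_i32 arguments.
 */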
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
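
/*
 * Note that cpu_gpr[0] is never written through these helpers: the
 * architectural $zero is synthesised as an immediate 0 on reads and
 * stores to it are simply dropped, so (assuming the init code leaves
 * cpu_gpr[0] unallocated) it never needs a backing TCG global.
 */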
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
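
/*
 * generate_exception_err() flushes PC and hflags via save_cpu_state() before
 * raising, so the exception is taken with a consistent CPU state;
 * generate_exception() skips that step and relies on the caller having
 * already saved state where it matters.
 */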
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
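
/*
 * With Status.FR = 0 (no MIPS_HFLAG_F64) 64-bit floating-point values are
 * held as even/odd pairs of 32-bit halves, which is why the 64-bit
 * accessors above split and recombine fpu_f64[reg & ~1] / fpu_f64[reg | 1].
 */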
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
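
/*
 * On 32-bit targets TCGv is only 32 bits wide, hence the extrl/extrh
 * variants in the #else branches; on TARGET_MIPS64 the value already lives
 * in a 64-bit TCGv and a sign-extension or arithmetic shift suffices.
 */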
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
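
/*
 * The six instantiations above expand into gen_cmp_d/s/ps and their
 * gen_cmpabs_* variants, i.e. one comparison emitter per format for the
 * pre-R6 C.cond.fmt family, each dispatching on the 4-bit condition
 * field n.
 */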
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case 0:  gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); break; \
    case 1:  gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); break; \
    case 2:  gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); break; \
    case 3:  gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); break; \
    case 4:  gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); break; \
    case 5:  gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); break; \
    case 6:  gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); break; \
    case 7:  gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); break; \
    case 8:  gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); break; \
    case 9:  gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); break; \
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); break; \
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break; \
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); break; \
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break; \
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); break; \
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break; \
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); break; \
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); break; \
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break; \
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); break; \
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
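
/*
 * Unlike the pre-R6 compares, the R6 CMP.condn.fmt emitters above write an
 * all-ones/all-zeros mask back to FPR fd (via the STORE argument) instead
 * of setting a condition-code bit in FCSR.
 */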
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,   \
                                DisasContext *ctx)                           \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,   \
                                DisasContext *ctx)                           \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx);                          \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
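
/*
 * LL/SC emulation: in the user-mode variant op_ld_ll/lld record the address
 * and loaded value in lladdr/llval, and op_st_sc/scd check alignment,
 * compare the store address with lladdr and, on a match, stash the target
 * register number and new value (llreg/llnewval) and raise EXCP_SC,
 * presumably leaving the final compare-and-store to the user-mode exception
 * loop; otherwise rt is simply cleared.  The system-mode variant defers all
 * of this to the ll/sc helpers.
 */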
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}

static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
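
/*
 * pc_relative_pc() yields the word-aligned base used by PC-relative loads:
 * when the current instruction sits in a delay slot the base is backed up
 * over the branch (2 or 4 bytes depending on MIPS_HFLAG_BDS16).
 */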
2146 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2147 int rt
, int base
, int16_t offset
)
2150 int mem_idx
= ctx
->mem_idx
;
2152 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2153 /* Loongson CPU uses a load to zero register for prefetch.
2154 We emulate it as a NOP. On other CPU we must perform the
2155 actual memory access. */
2159 t0
= tcg_temp_new();
2160 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2163 #if defined(TARGET_MIPS64)
2165 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2166 ctx
->default_tcg_memop_mask
);
2167 gen_store_gpr(t0
, rt
);
2170 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2171 ctx
->default_tcg_memop_mask
);
2172 gen_store_gpr(t0
, rt
);
2176 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2177 gen_store_gpr(t0
, rt
);
2180 t1
= tcg_temp_new();
2181 /* Do a byte access to possibly trigger a page
2182 fault with the unaligned address. */
2183 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2184 tcg_gen_andi_tl(t1
, t0
, 7);
2185 #ifndef TARGET_WORDS_BIGENDIAN
2186 tcg_gen_xori_tl(t1
, t1
, 7);
2188 tcg_gen_shli_tl(t1
, t1
, 3);
2189 tcg_gen_andi_tl(t0
, t0
, ~7);
2190 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2191 tcg_gen_shl_tl(t0
, t0
, t1
);
2192 t2
= tcg_const_tl(-1);
2193 tcg_gen_shl_tl(t2
, t2
, t1
);
2194 gen_load_gpr(t1
, rt
);
2195 tcg_gen_andc_tl(t1
, t1
, t2
);
2197 tcg_gen_or_tl(t0
, t0
, t1
);
2199 gen_store_gpr(t0
, rt
);
2202 t1
= tcg_temp_new();
2203 /* Do a byte access to possibly trigger a page
2204 fault with the unaligned address. */
2205 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2206 tcg_gen_andi_tl(t1
, t0
, 7);
2207 #ifdef TARGET_WORDS_BIGENDIAN
2208 tcg_gen_xori_tl(t1
, t1
, 7);
2210 tcg_gen_shli_tl(t1
, t1
, 3);
2211 tcg_gen_andi_tl(t0
, t0
, ~7);
2212 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2213 tcg_gen_shr_tl(t0
, t0
, t1
);
2214 tcg_gen_xori_tl(t1
, t1
, 63);
2215 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2216 tcg_gen_shl_tl(t2
, t2
, t1
);
2217 gen_load_gpr(t1
, rt
);
2218 tcg_gen_and_tl(t1
, t1
, t2
);
2220 tcg_gen_or_tl(t0
, t0
, t1
);
2222 gen_store_gpr(t0
, rt
);
2225 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2226 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2228 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2229 gen_store_gpr(t0
, rt
);
2233 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2234 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2236 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2237 gen_store_gpr(t0
, rt
);
2240 mem_idx
= MIPS_HFLAG_UM
;
2243 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2244 ctx
->default_tcg_memop_mask
);
2245 gen_store_gpr(t0
, rt
);
2248 mem_idx
= MIPS_HFLAG_UM
;
2251 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2252 ctx
->default_tcg_memop_mask
);
2253 gen_store_gpr(t0
, rt
);
2256 mem_idx
= MIPS_HFLAG_UM
;
2259 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2260 ctx
->default_tcg_memop_mask
);
2261 gen_store_gpr(t0
, rt
);
2264 mem_idx
= MIPS_HFLAG_UM
;
2267 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2268 gen_store_gpr(t0
, rt
);
2271 mem_idx
= MIPS_HFLAG_UM
;
2274 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2275 gen_store_gpr(t0
, rt
);
2278 mem_idx
= MIPS_HFLAG_UM
;
2281 t1
= tcg_temp_new();
2282 /* Do a byte access to possibly trigger a page
2283 fault with the unaligned address. */
2284 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2285 tcg_gen_andi_tl(t1
, t0
, 3);
2286 #ifndef TARGET_WORDS_BIGENDIAN
2287 tcg_gen_xori_tl(t1
, t1
, 3);
2289 tcg_gen_shli_tl(t1
, t1
, 3);
2290 tcg_gen_andi_tl(t0
, t0
, ~3);
2291 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2292 tcg_gen_shl_tl(t0
, t0
, t1
);
2293 t2
= tcg_const_tl(-1);
2294 tcg_gen_shl_tl(t2
, t2
, t1
);
2295 gen_load_gpr(t1
, rt
);
2296 tcg_gen_andc_tl(t1
, t1
, t2
);
2298 tcg_gen_or_tl(t0
, t0
, t1
);
2300 tcg_gen_ext32s_tl(t0
, t0
);
2301 gen_store_gpr(t0
, rt
);
2304 mem_idx
= MIPS_HFLAG_UM
;
2307 t1
= tcg_temp_new();
2308 /* Do a byte access to possibly trigger a page
2309 fault with the unaligned address. */
2310 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2311 tcg_gen_andi_tl(t1
, t0
, 3);
2312 #ifdef TARGET_WORDS_BIGENDIAN
2313 tcg_gen_xori_tl(t1
, t1
, 3);
2315 tcg_gen_shli_tl(t1
, t1
, 3);
2316 tcg_gen_andi_tl(t0
, t0
, ~3);
2317 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2318 tcg_gen_shr_tl(t0
, t0
, t1
);
2319 tcg_gen_xori_tl(t1
, t1
, 31);
2320 t2
= tcg_const_tl(0xfffffffeull
);
2321 tcg_gen_shl_tl(t2
, t2
, t1
);
2322 gen_load_gpr(t1
, rt
);
2323 tcg_gen_and_tl(t1
, t1
, t2
);
2325 tcg_gen_or_tl(t0
, t0
, t1
);
2327 tcg_gen_ext32s_tl(t0
, t0
);
2328 gen_store_gpr(t0
, rt
);
2331 mem_idx
= MIPS_HFLAG_UM
;
2335 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2336 gen_store_gpr(t0
, rt
);
2343 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2344 int base
, int16_t offset
)
2346 TCGv t0
= tcg_temp_new();
2347 TCGv t1
= tcg_temp_new();
2348 int mem_idx
= ctx
->mem_idx
;
2350 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2351 gen_load_gpr(t1
, rt
);
2353 #if defined(TARGET_MIPS64)
2355 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2356 ctx
->default_tcg_memop_mask
);
2359 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2362 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2366 mem_idx
= MIPS_HFLAG_UM
;
2369 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2370 ctx
->default_tcg_memop_mask
);
2373 mem_idx
= MIPS_HFLAG_UM
;
2376 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2377 ctx
->default_tcg_memop_mask
);
2380 mem_idx
= MIPS_HFLAG_UM
;
2383 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2386 mem_idx
= MIPS_HFLAG_UM
;
2389 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2392 mem_idx
= MIPS_HFLAG_UM
;
2395 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2403 /* Store conditional */
2404 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2405 int base
, int16_t offset
)
2408 int mem_idx
= ctx
->mem_idx
;
2410 #ifdef CONFIG_USER_ONLY
2411 t0
= tcg_temp_local_new();
2412 t1
= tcg_temp_local_new();
2414 t0
= tcg_temp_new();
2415 t1
= tcg_temp_new();
2417 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2418 gen_load_gpr(t1
, rt
);
2420 #if defined(TARGET_MIPS64)
2423 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2427 mem_idx
= MIPS_HFLAG_UM
;
2431 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2438 /* Load and store */
2439 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2440 int base
, int16_t offset
)
2442 TCGv t0
= tcg_temp_new();
2444 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2445 /* Don't do NOP if destination is zero: we must perform the actual
2450 TCGv_i32 fp0
= tcg_temp_new_i32();
2451 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2452 ctx
->default_tcg_memop_mask
);
2453 gen_store_fpr32(ctx
, fp0
, ft
);
2454 tcg_temp_free_i32(fp0
);
2459 TCGv_i32 fp0
= tcg_temp_new_i32();
2460 gen_load_fpr32(ctx
, fp0
, ft
);
2461 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2462 ctx
->default_tcg_memop_mask
);
2463 tcg_temp_free_i32(fp0
);
2468 TCGv_i64 fp0
= tcg_temp_new_i64();
2469 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2470 ctx
->default_tcg_memop_mask
);
2471 gen_store_fpr64(ctx
, fp0
, ft
);
2472 tcg_temp_free_i64(fp0
);
2477 TCGv_i64 fp0
= tcg_temp_new_i64();
2478 gen_load_fpr64(ctx
, fp0
, ft
);
2479 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2480 ctx
->default_tcg_memop_mask
);
2481 tcg_temp_free_i64(fp0
);
2485 MIPS_INVAL("flt_ldst");
2486 generate_exception_end(ctx
, EXCP_RI
);
2493 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2494 int rs
, int16_t imm
)
2496 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2497 check_cp1_enabled(ctx
);
2501 check_insn(ctx
, ISA_MIPS2
);
2504 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2507 generate_exception_err(ctx
, EXCP_CpU
, 1);
2511 /* Arithmetic with immediate operand */
2512 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2513 int rt
, int rs
, int16_t imm
)
2515 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2517 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2518 /* If no destination, treat it as a NOP.
2519 For addi, we must generate the overflow exception when needed. */
2525 TCGv t0
= tcg_temp_local_new();
2526 TCGv t1
= tcg_temp_new();
2527 TCGv t2
= tcg_temp_new();
2528 TCGLabel
*l1
= gen_new_label();
2530 gen_load_gpr(t1
, rs
);
2531 tcg_gen_addi_tl(t0
, t1
, uimm
);
2532 tcg_gen_ext32s_tl(t0
, t0
);
2534 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2535 tcg_gen_xori_tl(t2
, t0
, uimm
);
2536 tcg_gen_and_tl(t1
, t1
, t2
);
2538 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2540 /* operands of same sign, result different sign */
2541 generate_exception(ctx
, EXCP_OVERFLOW
);
2543 tcg_gen_ext32s_tl(t0
, t0
);
2544 gen_store_gpr(t0
, rt
);
2550 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2551 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2553 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2556 #if defined(TARGET_MIPS64)
2559 TCGv t0
= tcg_temp_local_new();
2560 TCGv t1
= tcg_temp_new();
2561 TCGv t2
= tcg_temp_new();
2562 TCGLabel
*l1
= gen_new_label();
2564 gen_load_gpr(t1
, rs
);
2565 tcg_gen_addi_tl(t0
, t1
, uimm
);
2567 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2568 tcg_gen_xori_tl(t2
, t0
, uimm
);
2569 tcg_gen_and_tl(t1
, t1
, t2
);
2571 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2573 /* operands of same sign, result different sign */
2574 generate_exception(ctx
, EXCP_OVERFLOW
);
2576 gen_store_gpr(t0
, rt
);
2582 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2584 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2591 /* Logic with immediate operand */
2592 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2593 int rt
, int rs
, int16_t imm
)
2598 /* If no destination, treat it as a NOP. */
2601 uimm
= (uint16_t)imm
;
2604 if (likely(rs
!= 0))
2605 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2607 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2611 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2613 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2616 if (likely(rs
!= 0))
2617 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2619 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2622 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2624 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2625 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2627 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2636 /* Set on less than with immediate operand */
2637 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2638 int rt
, int rs
, int16_t imm
)
2640 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2644 /* If no destination, treat it as a NOP. */
2647 t0
= tcg_temp_new();
2648 gen_load_gpr(t0
, rs
);
2651 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2654 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2660 /* Shifts with immediate operand */
2661 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2662 int rt
, int rs
, int16_t imm
)
2664 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2668 /* If no destination, treat it as a NOP. */
2672 t0
= tcg_temp_new();
2673 gen_load_gpr(t0
, rs
);
2676 tcg_gen_shli_tl(t0
, t0
, uimm
);
2677 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2680 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2684 tcg_gen_ext32u_tl(t0
, t0
);
2685 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2687 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2692 TCGv_i32 t1
= tcg_temp_new_i32();
2694 tcg_gen_trunc_tl_i32(t1
, t0
);
2695 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2696 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2697 tcg_temp_free_i32(t1
);
2699 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2702 #if defined(TARGET_MIPS64)
2704 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2707 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2710 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2714 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2716 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2720 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2723 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2726 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2729 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2737 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2738 int rd
, int rs
, int rt
)
2740 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2741 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2742 /* If no destination, treat it as a NOP.
2743 For add & sub, we must generate the overflow exception when needed. */
2750 TCGv t0
= tcg_temp_local_new();
2751 TCGv t1
= tcg_temp_new();
2752 TCGv t2
= tcg_temp_new();
2753 TCGLabel
*l1
= gen_new_label();
2755 gen_load_gpr(t1
, rs
);
2756 gen_load_gpr(t2
, rt
);
2757 tcg_gen_add_tl(t0
, t1
, t2
);
2758 tcg_gen_ext32s_tl(t0
, t0
);
2759 tcg_gen_xor_tl(t1
, t1
, t2
);
2760 tcg_gen_xor_tl(t2
, t0
, t2
);
2761 tcg_gen_andc_tl(t1
, t2
, t1
);
2763 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2765 /* operands of same sign, result different sign */
2766 generate_exception(ctx
, EXCP_OVERFLOW
);
2768 gen_store_gpr(t0
, rd
);
2773 if (rs
!= 0 && rt
!= 0) {
2774 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2775 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2776 } else if (rs
== 0 && rt
!= 0) {
2777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2778 } else if (rs
!= 0 && rt
== 0) {
2779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2781 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2786 TCGv t0
= tcg_temp_local_new();
2787 TCGv t1
= tcg_temp_new();
2788 TCGv t2
= tcg_temp_new();
2789 TCGLabel
*l1
= gen_new_label();
2791 gen_load_gpr(t1
, rs
);
2792 gen_load_gpr(t2
, rt
);
2793 tcg_gen_sub_tl(t0
, t1
, t2
);
2794 tcg_gen_ext32s_tl(t0
, t0
);
2795 tcg_gen_xor_tl(t2
, t1
, t2
);
2796 tcg_gen_xor_tl(t1
, t0
, t1
);
2797 tcg_gen_and_tl(t1
, t1
, t2
);
2799 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2801 /* operands of different sign, first operand and result different sign */
2802 generate_exception(ctx
, EXCP_OVERFLOW
);
2804 gen_store_gpr(t0
, rd
);
2809 if (rs
!= 0 && rt
!= 0) {
2810 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2811 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2812 } else if (rs
== 0 && rt
!= 0) {
2813 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2815 } else if (rs
!= 0 && rt
== 0) {
2816 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2821 #if defined(TARGET_MIPS64)
2824 TCGv t0
= tcg_temp_local_new();
2825 TCGv t1
= tcg_temp_new();
2826 TCGv t2
= tcg_temp_new();
2827 TCGLabel
*l1
= gen_new_label();
2829 gen_load_gpr(t1
, rs
);
2830 gen_load_gpr(t2
, rt
);
2831 tcg_gen_add_tl(t0
, t1
, t2
);
2832 tcg_gen_xor_tl(t1
, t1
, t2
);
2833 tcg_gen_xor_tl(t2
, t0
, t2
);
2834 tcg_gen_andc_tl(t1
, t2
, t1
);
2836 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2838 /* operands of same sign, result different sign */
2839 generate_exception(ctx
, EXCP_OVERFLOW
);
2841 gen_store_gpr(t0
, rd
);
2846 if (rs
!= 0 && rt
!= 0) {
2847 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2848 } else if (rs
== 0 && rt
!= 0) {
2849 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2850 } else if (rs
!= 0 && rt
== 0) {
2851 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2853 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2858 TCGv t0
= tcg_temp_local_new();
2859 TCGv t1
= tcg_temp_new();
2860 TCGv t2
= tcg_temp_new();
2861 TCGLabel
*l1
= gen_new_label();
2863 gen_load_gpr(t1
, rs
);
2864 gen_load_gpr(t2
, rt
);
2865 tcg_gen_sub_tl(t0
, t1
, t2
);
2866 tcg_gen_xor_tl(t2
, t1
, t2
);
2867 tcg_gen_xor_tl(t1
, t0
, t1
);
2868 tcg_gen_and_tl(t1
, t1
, t2
);
2870 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2872 /* operands of different sign, first operand and result different sign */
2873 generate_exception(ctx
, EXCP_OVERFLOW
);
2875 gen_store_gpr(t0
, rd
);
2880 if (rs
!= 0 && rt
!= 0) {
2881 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 } else if (rs
== 0 && rt
!= 0) {
2883 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2884 } else if (rs
!= 0 && rt
== 0) {
2885 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2887 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2892 if (likely(rs
!= 0 && rt
!= 0)) {
2893 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2894 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2896 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2902 /* Conditional move */
2903 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2904 int rd
, int rs
, int rt
)
2909 /* If no destination, treat it as a NOP. */
2913 t0
= tcg_temp_new();
2914 gen_load_gpr(t0
, rt
);
2915 t1
= tcg_const_tl(0);
2916 t2
= tcg_temp_new();
2917 gen_load_gpr(t2
, rs
);
2920 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2923 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2926 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2929 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2938 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2939 int rd
, int rs
, int rt
)
2942 /* If no destination, treat it as a NOP. */
2948 if (likely(rs
!= 0 && rt
!= 0)) {
2949 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2951 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2955 if (rs
!= 0 && rt
!= 0) {
2956 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2957 } else if (rs
== 0 && rt
!= 0) {
2958 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2959 } else if (rs
!= 0 && rt
== 0) {
2960 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2962 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2966 if (likely(rs
!= 0 && rt
!= 0)) {
2967 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2968 } else if (rs
== 0 && rt
!= 0) {
2969 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2970 } else if (rs
!= 0 && rt
== 0) {
2971 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2973 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2977 if (likely(rs
!= 0 && rt
!= 0)) {
2978 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2979 } else if (rs
== 0 && rt
!= 0) {
2980 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2981 } else if (rs
!= 0 && rt
== 0) {
2982 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2984 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2990 /* Set on lower than */
2991 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2992 int rd
, int rs
, int rt
)
2997 /* If no destination, treat it as a NOP. */
3001 t0
= tcg_temp_new();
3002 t1
= tcg_temp_new();
3003 gen_load_gpr(t0
, rs
);
3004 gen_load_gpr(t1
, rt
);
3007 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3010 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3018 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3019 int rd
, int rs
, int rt
)
3024 /* If no destination, treat it as a NOP.
3025 For add & sub, we must generate the overflow exception when needed. */
3029 t0
= tcg_temp_new();
3030 t1
= tcg_temp_new();
3031 gen_load_gpr(t0
, rs
);
3032 gen_load_gpr(t1
, rt
);
3035 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3036 tcg_gen_shl_tl(t0
, t1
, t0
);
3037 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3040 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3041 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3044 tcg_gen_ext32u_tl(t1
, t1
);
3045 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3046 tcg_gen_shr_tl(t0
, t1
, t0
);
3047 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3051 TCGv_i32 t2
= tcg_temp_new_i32();
3052 TCGv_i32 t3
= tcg_temp_new_i32();
3054 tcg_gen_trunc_tl_i32(t2
, t0
);
3055 tcg_gen_trunc_tl_i32(t3
, t1
);
3056 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3057 tcg_gen_rotr_i32(t2
, t3
, t2
);
3058 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3059 tcg_temp_free_i32(t2
);
3060 tcg_temp_free_i32(t3
);
3063 #if defined(TARGET_MIPS64)
3065 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3066 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3069 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3070 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3073 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3074 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3077 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3078 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3086 /* Arithmetic on HI/LO registers */
3087 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3089 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3100 #if defined(TARGET_MIPS64)
3102 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3106 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3110 #if defined(TARGET_MIPS64)
3112 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3116 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3121 #if defined(TARGET_MIPS64)
3123 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3127 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3130 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3135 #if defined(TARGET_MIPS64)
3137 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3141 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3144 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
static inline void gen_r6_ld(target_long addr, int reg, int memidx,
                             TCGMemOp memop)
{
    TCGv t0 = tcg_const_tl(addr);
    tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
    gen_store_gpr(t0, reg);
    tcg_temp_free(t0);
}
3159 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3165 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3168 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3169 addr
= addr_add(ctx
, pc
, offset
);
3170 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3174 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3175 addr
= addr_add(ctx
, pc
, offset
);
3176 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3178 #if defined(TARGET_MIPS64)
3181 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3182 addr
= addr_add(ctx
, pc
, offset
);
3183 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3187 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3190 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3191 addr
= addr_add(ctx
, pc
, offset
);
3192 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3197 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3198 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3199 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3202 #if defined(TARGET_MIPS64)
3203 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3204 case R6_OPC_LDPC
+ (1 << 16):
3205 case R6_OPC_LDPC
+ (2 << 16):
3206 case R6_OPC_LDPC
+ (3 << 16):
3208 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3209 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3210 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3214 MIPS_INVAL("OPC_PCREL");
3215 generate_exception_end(ctx
, EXCP_RI
);
3222 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3231 t0
= tcg_temp_new();
3232 t1
= tcg_temp_new();
3234 gen_load_gpr(t0
, rs
);
3235 gen_load_gpr(t1
, rt
);
3240 TCGv t2
= tcg_temp_new();
3241 TCGv t3
= tcg_temp_new();
3242 tcg_gen_ext32s_tl(t0
, t0
);
3243 tcg_gen_ext32s_tl(t1
, t1
);
3244 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3245 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3246 tcg_gen_and_tl(t2
, t2
, t3
);
3247 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3248 tcg_gen_or_tl(t2
, t2
, t3
);
3249 tcg_gen_movi_tl(t3
, 0);
3250 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3251 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3252 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3259 TCGv t2
= tcg_temp_new();
3260 TCGv t3
= tcg_temp_new();
3261 tcg_gen_ext32s_tl(t0
, t0
);
3262 tcg_gen_ext32s_tl(t1
, t1
);
3263 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3264 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3265 tcg_gen_and_tl(t2
, t2
, t3
);
3266 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3267 tcg_gen_or_tl(t2
, t2
, t3
);
3268 tcg_gen_movi_tl(t3
, 0);
3269 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3270 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3271 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3278 TCGv t2
= tcg_const_tl(0);
3279 TCGv t3
= tcg_const_tl(1);
3280 tcg_gen_ext32u_tl(t0
, t0
);
3281 tcg_gen_ext32u_tl(t1
, t1
);
3282 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3283 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3284 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3291 TCGv t2
= tcg_const_tl(0);
3292 TCGv t3
= tcg_const_tl(1);
3293 tcg_gen_ext32u_tl(t0
, t0
);
3294 tcg_gen_ext32u_tl(t1
, t1
);
3295 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3296 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3297 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3304 TCGv_i32 t2
= tcg_temp_new_i32();
3305 TCGv_i32 t3
= tcg_temp_new_i32();
3306 tcg_gen_trunc_tl_i32(t2
, t0
);
3307 tcg_gen_trunc_tl_i32(t3
, t1
);
3308 tcg_gen_mul_i32(t2
, t2
, t3
);
3309 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3310 tcg_temp_free_i32(t2
);
3311 tcg_temp_free_i32(t3
);
3316 TCGv_i32 t2
= tcg_temp_new_i32();
3317 TCGv_i32 t3
= tcg_temp_new_i32();
3318 tcg_gen_trunc_tl_i32(t2
, t0
);
3319 tcg_gen_trunc_tl_i32(t3
, t1
);
3320 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3321 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3322 tcg_temp_free_i32(t2
);
3323 tcg_temp_free_i32(t3
);
3328 TCGv_i32 t2
= tcg_temp_new_i32();
3329 TCGv_i32 t3
= tcg_temp_new_i32();
3330 tcg_gen_trunc_tl_i32(t2
, t0
);
3331 tcg_gen_trunc_tl_i32(t3
, t1
);
3332 tcg_gen_mul_i32(t2
, t2
, t3
);
3333 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3334 tcg_temp_free_i32(t2
);
3335 tcg_temp_free_i32(t3
);
3340 TCGv_i32 t2
= tcg_temp_new_i32();
3341 TCGv_i32 t3
= tcg_temp_new_i32();
3342 tcg_gen_trunc_tl_i32(t2
, t0
);
3343 tcg_gen_trunc_tl_i32(t3
, t1
);
3344 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3345 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3346 tcg_temp_free_i32(t2
);
3347 tcg_temp_free_i32(t3
);
3350 #if defined(TARGET_MIPS64)
3353 TCGv t2
= tcg_temp_new();
3354 TCGv t3
= tcg_temp_new();
3355 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3356 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3357 tcg_gen_and_tl(t2
, t2
, t3
);
3358 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3359 tcg_gen_or_tl(t2
, t2
, t3
);
3360 tcg_gen_movi_tl(t3
, 0);
3361 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3362 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3369 TCGv t2
= tcg_temp_new();
3370 TCGv t3
= tcg_temp_new();
3371 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3372 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3373 tcg_gen_and_tl(t2
, t2
, t3
);
3374 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3375 tcg_gen_or_tl(t2
, t2
, t3
);
3376 tcg_gen_movi_tl(t3
, 0);
3377 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3378 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3385 TCGv t2
= tcg_const_tl(0);
3386 TCGv t3
= tcg_const_tl(1);
3387 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3388 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3395 TCGv t2
= tcg_const_tl(0);
3396 TCGv t3
= tcg_const_tl(1);
3397 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3398 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3404 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3408 TCGv t2
= tcg_temp_new();
3409 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3414 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3418 TCGv t2
= tcg_temp_new();
3419 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3425 MIPS_INVAL("r6 mul/div");
3426 generate_exception_end(ctx
, EXCP_RI
);
3434 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3435 int acc
, int rs
, int rt
)
3439 t0
= tcg_temp_new();
3440 t1
= tcg_temp_new();
3442 gen_load_gpr(t0
, rs
);
3443 gen_load_gpr(t1
, rt
);
3452 TCGv t2
= tcg_temp_new();
3453 TCGv t3
= tcg_temp_new();
3454 tcg_gen_ext32s_tl(t0
, t0
);
3455 tcg_gen_ext32s_tl(t1
, t1
);
3456 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3457 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3458 tcg_gen_and_tl(t2
, t2
, t3
);
3459 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3460 tcg_gen_or_tl(t2
, t2
, t3
);
3461 tcg_gen_movi_tl(t3
, 0);
3462 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3463 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3464 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3465 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3466 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3473 TCGv t2
= tcg_const_tl(0);
3474 TCGv t3
= tcg_const_tl(1);
3475 tcg_gen_ext32u_tl(t0
, t0
);
3476 tcg_gen_ext32u_tl(t1
, t1
);
3477 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3478 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3479 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3480 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3481 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3488 TCGv_i32 t2
= tcg_temp_new_i32();
3489 TCGv_i32 t3
= tcg_temp_new_i32();
3490 tcg_gen_trunc_tl_i32(t2
, t0
);
3491 tcg_gen_trunc_tl_i32(t3
, t1
);
3492 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3493 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3494 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3495 tcg_temp_free_i32(t2
);
3496 tcg_temp_free_i32(t3
);
3501 TCGv_i32 t2
= tcg_temp_new_i32();
3502 TCGv_i32 t3
= tcg_temp_new_i32();
3503 tcg_gen_trunc_tl_i32(t2
, t0
);
3504 tcg_gen_trunc_tl_i32(t3
, t1
);
3505 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3506 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3507 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3508 tcg_temp_free_i32(t2
);
3509 tcg_temp_free_i32(t3
);
3512 #if defined(TARGET_MIPS64)
3515 TCGv t2
= tcg_temp_new();
3516 TCGv t3
= tcg_temp_new();
3517 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3518 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3519 tcg_gen_and_tl(t2
, t2
, t3
);
3520 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3521 tcg_gen_or_tl(t2
, t2
, t3
);
3522 tcg_gen_movi_tl(t3
, 0);
3523 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3524 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3525 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3532 TCGv t2
= tcg_const_tl(0);
3533 TCGv t3
= tcg_const_tl(1);
3534 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3535 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3536 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3542 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3545 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3550 TCGv_i64 t2
= tcg_temp_new_i64();
3551 TCGv_i64 t3
= tcg_temp_new_i64();
3553 tcg_gen_ext_tl_i64(t2
, t0
);
3554 tcg_gen_ext_tl_i64(t3
, t1
);
3555 tcg_gen_mul_i64(t2
, t2
, t3
);
3556 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3557 tcg_gen_add_i64(t2
, t2
, t3
);
3558 tcg_temp_free_i64(t3
);
3559 gen_move_low32(cpu_LO
[acc
], t2
);
3560 gen_move_high32(cpu_HI
[acc
], t2
);
3561 tcg_temp_free_i64(t2
);
3566 TCGv_i64 t2
= tcg_temp_new_i64();
3567 TCGv_i64 t3
= tcg_temp_new_i64();
3569 tcg_gen_ext32u_tl(t0
, t0
);
3570 tcg_gen_ext32u_tl(t1
, t1
);
3571 tcg_gen_extu_tl_i64(t2
, t0
);
3572 tcg_gen_extu_tl_i64(t3
, t1
);
3573 tcg_gen_mul_i64(t2
, t2
, t3
);
3574 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3575 tcg_gen_add_i64(t2
, t2
, t3
);
3576 tcg_temp_free_i64(t3
);
3577 gen_move_low32(cpu_LO
[acc
], t2
);
3578 gen_move_high32(cpu_HI
[acc
], t2
);
3579 tcg_temp_free_i64(t2
);
3584 TCGv_i64 t2
= tcg_temp_new_i64();
3585 TCGv_i64 t3
= tcg_temp_new_i64();
3587 tcg_gen_ext_tl_i64(t2
, t0
);
3588 tcg_gen_ext_tl_i64(t3
, t1
);
3589 tcg_gen_mul_i64(t2
, t2
, t3
);
3590 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3591 tcg_gen_sub_i64(t2
, t3
, t2
);
3592 tcg_temp_free_i64(t3
);
3593 gen_move_low32(cpu_LO
[acc
], t2
);
3594 gen_move_high32(cpu_HI
[acc
], t2
);
3595 tcg_temp_free_i64(t2
);
3600 TCGv_i64 t2
= tcg_temp_new_i64();
3601 TCGv_i64 t3
= tcg_temp_new_i64();
3603 tcg_gen_ext32u_tl(t0
, t0
);
3604 tcg_gen_ext32u_tl(t1
, t1
);
3605 tcg_gen_extu_tl_i64(t2
, t0
);
3606 tcg_gen_extu_tl_i64(t3
, t1
);
3607 tcg_gen_mul_i64(t2
, t2
, t3
);
3608 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3609 tcg_gen_sub_i64(t2
, t3
, t2
);
3610 tcg_temp_free_i64(t3
);
3611 gen_move_low32(cpu_LO
[acc
], t2
);
3612 gen_move_high32(cpu_HI
[acc
], t2
);
3613 tcg_temp_free_i64(t2
);
3617 MIPS_INVAL("mul/div");
3618 generate_exception_end(ctx
, EXCP_RI
);
3626 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3627 int rd
, int rs
, int rt
)
3629 TCGv t0
= tcg_temp_new();
3630 TCGv t1
= tcg_temp_new();
3632 gen_load_gpr(t0
, rs
);
3633 gen_load_gpr(t1
, rt
);
3636 case OPC_VR54XX_MULS
:
3637 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3639 case OPC_VR54XX_MULSU
:
3640 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3642 case OPC_VR54XX_MACC
:
3643 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3645 case OPC_VR54XX_MACCU
:
3646 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3648 case OPC_VR54XX_MSAC
:
3649 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3651 case OPC_VR54XX_MSACU
:
3652 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3654 case OPC_VR54XX_MULHI
:
3655 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3657 case OPC_VR54XX_MULHIU
:
3658 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3660 case OPC_VR54XX_MULSHI
:
3661 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3663 case OPC_VR54XX_MULSHIU
:
3664 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3666 case OPC_VR54XX_MACCHI
:
3667 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3669 case OPC_VR54XX_MACCHIU
:
3670 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3672 case OPC_VR54XX_MSACHI
:
3673 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3675 case OPC_VR54XX_MSACHIU
:
3676 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3679 MIPS_INVAL("mul vr54xx");
3680 generate_exception_end(ctx
, EXCP_RI
);
3683 gen_store_gpr(t0
, rd
);
3690 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3700 gen_load_gpr(t0
, rs
);
3705 #if defined(TARGET_MIPS64)
3709 tcg_gen_not_tl(t0
, t0
);
3718 tcg_gen_ext32u_tl(t0
, t0
);
3719 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3720 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3722 #if defined(TARGET_MIPS64)
3727 tcg_gen_clzi_i64(t0
, t0
, 64);
3733 /* Godson integer instructions */
3734 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3735 int rd
, int rs
, int rt
)
3747 case OPC_MULTU_G_2E
:
3748 case OPC_MULTU_G_2F
:
3749 #if defined(TARGET_MIPS64)
3750 case OPC_DMULT_G_2E
:
3751 case OPC_DMULT_G_2F
:
3752 case OPC_DMULTU_G_2E
:
3753 case OPC_DMULTU_G_2F
:
3755 t0
= tcg_temp_new();
3756 t1
= tcg_temp_new();
3759 t0
= tcg_temp_local_new();
3760 t1
= tcg_temp_local_new();
3764 gen_load_gpr(t0
, rs
);
3765 gen_load_gpr(t1
, rt
);
3770 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3771 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3773 case OPC_MULTU_G_2E
:
3774 case OPC_MULTU_G_2F
:
3775 tcg_gen_ext32u_tl(t0
, t0
);
3776 tcg_gen_ext32u_tl(t1
, t1
);
3777 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3778 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3783 TCGLabel
*l1
= gen_new_label();
3784 TCGLabel
*l2
= gen_new_label();
3785 TCGLabel
*l3
= gen_new_label();
3786 tcg_gen_ext32s_tl(t0
, t0
);
3787 tcg_gen_ext32s_tl(t1
, t1
);
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3793 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3794 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3797 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3798 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3805 TCGLabel
*l1
= gen_new_label();
3806 TCGLabel
*l2
= gen_new_label();
3807 tcg_gen_ext32u_tl(t0
, t0
);
3808 tcg_gen_ext32u_tl(t1
, t1
);
3809 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3810 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3813 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3821 TCGLabel
*l1
= gen_new_label();
3822 TCGLabel
*l2
= gen_new_label();
3823 TCGLabel
*l3
= gen_new_label();
3824 tcg_gen_ext32u_tl(t0
, t0
);
3825 tcg_gen_ext32u_tl(t1
, t1
);
3826 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3827 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3830 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3833 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3834 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3841 TCGLabel
*l1
= gen_new_label();
3842 TCGLabel
*l2
= gen_new_label();
3843 tcg_gen_ext32u_tl(t0
, t0
);
3844 tcg_gen_ext32u_tl(t1
, t1
);
3845 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3846 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3849 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3850 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3854 #if defined(TARGET_MIPS64)
3855 case OPC_DMULT_G_2E
:
3856 case OPC_DMULT_G_2F
:
3857 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3859 case OPC_DMULTU_G_2E
:
3860 case OPC_DMULTU_G_2F
:
3861 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3866 TCGLabel
*l1
= gen_new_label();
3867 TCGLabel
*l2
= gen_new_label();
3868 TCGLabel
*l3
= gen_new_label();
3869 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3870 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3873 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3874 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3875 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3878 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3882 case OPC_DDIVU_G_2E
:
3883 case OPC_DDIVU_G_2F
:
3885 TCGLabel
*l1
= gen_new_label();
3886 TCGLabel
*l2
= gen_new_label();
3887 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3888 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3891 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3898 TCGLabel
*l1
= gen_new_label();
3899 TCGLabel
*l2
= gen_new_label();
3900 TCGLabel
*l3
= gen_new_label();
3901 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3902 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3903 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3905 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3908 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3912 case OPC_DMODU_G_2E
:
3913 case OPC_DMODU_G_2F
:
3915 TCGLabel
*l1
= gen_new_label();
3916 TCGLabel
*l2
= gen_new_label();
3917 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3918 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3921 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/* Loongson multimedia instructions */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
{
    uint32_t opc, shift_max;
    TCGv_i64 t0, t1;

    opc = MASK_LMI(ctx->opcode);
    switch (opc) {
    case OPC_ADD_CP2:
    case OPC_SUB_CP2:
    case OPC_DADD_CP2:
    case OPC_DSUB_CP2:
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        break;
    default:
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        break;
    }

    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);

#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    switch (opc) {
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);

    case OPC_PANDN:
        tcg_gen_andc_i64(t0, t1, t0);
        break;

    case OPC_PINSRH_0:
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        break;
    case OPC_PINSRH_1:
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        break;
    case OPC_PINSRH_2:
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        break;
    case OPC_PINSRH_3:
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        break;

    case OPC_PEXTRH:
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        break;

    case OPC_ADDU_CP2:
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        break;
    case OPC_SUBU_CP2:
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        break;

    case OPC_SLL_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_SRL_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_SRA_CP2:
        shift_max = 32;
        goto do_shift;
    case OPC_DSLL_CP2:
        shift_max = 64;
        goto do_shift;
    case OPC_DSRL_CP2:
        shift_max = 64;
        goto do_shift;
    case OPC_DSRA_CP2:
        shift_max = 64;
        goto do_shift;
    do_shift:
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        switch (opc) {
        case OPC_SLL_CP2:
        case OPC_DSLL_CP2:
            tcg_gen_shl_i64(t0, t0, t1);
            break;
        case OPC_SRA_CP2:
        case OPC_DSRA_CP2:
            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same. */
            tcg_gen_sar_i64(t0, t0, t1);
            break;
        case OPC_SRL_CP2:
            /* We want to shift in zeros for SRL; zero-extend first. */
            tcg_gen_ext32u_i64(t0, t0);
            /* FALLTHRU */
        case OPC_DSRL_CP2:
            tcg_gen_shr_i64(t0, t0, t1);
            break;
        }

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
        break;

    case OPC_ADD_CP2:
    case OPC_DADD_CP2:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);
            break;
        }

    case OPC_SUB_CP2:
    case OPC_DSUB_CP2:
        {
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(lab);
            break;
        }

    case OPC_PMULUW:
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        break;

    case OPC_SEQU_CP2:
    case OPC_SEQ_CP2:
    case OPC_SLTU_CP2:
    case OPC_SLT_CP2:
    case OPC_SLEU_CP2:
    case OPC_SLE_CP2:
        /* ??? Document is unclear: Set FCC[CC]. Does that mean the
           FD field is the CC field? */
    default:
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
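/*
 * Editor's note: the ADD_CP2/SUB_CP2 cases above detect signed overflow
 * with the classic XOR trick rather than a widening operation.  Below is
 * a plain C restatement of the same test for addition; it is illustrative
 * only and is not used by the translator (the function name is made up).
 */
static inline bool lmi_add_would_overflow(int64_t a, int64_t b)
{
    /* Do the add in unsigned arithmetic to avoid C-level undefined
       behaviour on overflow. */
    int64_t r = (int64_t)((uint64_t)a + (uint64_t)b);
    /* Overflow iff the operands have the same sign and the result's sign
       differs, i.e. (a ^ r) & ~(a ^ b) has its sign bit set -- exactly
       the value the TCG sequence above leaves in t1. */
    return ((a ^ r) & ~(a ^ b)) < 0;
}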
4187 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4188 int rs
, int rt
, int16_t imm
)
4191 TCGv t0
= tcg_temp_new();
4192 TCGv t1
= tcg_temp_new();
4195 /* Load needed operands */
4203 /* Compare two registers */
4205 gen_load_gpr(t0
, rs
);
4206 gen_load_gpr(t1
, rt
);
4216 /* Compare register to immediate */
4217 if (rs
!= 0 || imm
!= 0) {
4218 gen_load_gpr(t0
, rs
);
4219 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4226 case OPC_TEQ
: /* rs == rs */
4227 case OPC_TEQI
: /* r0 == 0 */
4228 case OPC_TGE
: /* rs >= rs */
4229 case OPC_TGEI
: /* r0 >= 0 */
4230 case OPC_TGEU
: /* rs >= rs unsigned */
4231 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4233 generate_exception_end(ctx
, EXCP_TRAP
);
4235 case OPC_TLT
: /* rs < rs */
4236 case OPC_TLTI
: /* r0 < 0 */
4237 case OPC_TLTU
: /* rs < rs unsigned */
4238 case OPC_TLTIU
: /* r0 < 0 unsigned */
4239 case OPC_TNE
: /* rs != rs */
4240 case OPC_TNEI
: /* r0 != 0 */
4241 /* Never trap: treat as NOP. */
4245 TCGLabel
*l1
= gen_new_label();
4250 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4254 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4258 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4262 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4266 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4270 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4273 generate_exception(ctx
, EXCP_TRAP
);
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->singlestep_enabled)) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
    } else {
        gen_save_pc(dest);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        }
        tcg_gen_lookup_and_goto_ptr();
    }
}
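/*
 * Editor's note: gen_goto_tb() above chains translation blocks directly
 * (tcg_gen_goto_tb() plus tcg_gen_exit_tb() with the jump-slot index n
 * encoded in the low bits of the TB pointer) only when use_goto_tb()
 * reports that the destination lies on the same guest page and we are
 * not single-stepping; otherwise it saves the PC and lets
 * tcg_gen_lookup_and_goto_ptr() find the destination TB at run time,
 * raising the debug exception first when single-stepping.
 */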
4309 /* Branches (before delay slot) */
4310 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4312 int rs
, int rt
, int32_t offset
,
4315 target_ulong btgt
= -1;
4317 int bcond_compute
= 0;
4318 TCGv t0
= tcg_temp_new();
4319 TCGv t1
= tcg_temp_new();
4321 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
4330 /* Load needed operands */
4336 /* Compare two registers */
4338 gen_load_gpr(t0
, rs
);
4339 gen_load_gpr(t1
, rt
);
4342 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4356 /* Compare to zero */
4358 gen_load_gpr(t0
, rs
);
4361 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4364 #if defined(TARGET_MIPS64)
4366 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4368 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4371 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4376 /* Jump to immediate */
4377 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4381 /* Jump to register */
4382 if (offset
!= 0 && offset
!= 16) {
4383 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4384 others are reserved. */
4385 MIPS_INVAL("jump hint");
4386 generate_exception_end(ctx
, EXCP_RI
);
4389 gen_load_gpr(btarget
, rs
);
4392 MIPS_INVAL("branch/jump");
4393 generate_exception_end(ctx
, EXCP_RI
);
4396 if (bcond_compute
== 0) {
4397 /* No condition to be computed */
4399 case OPC_BEQ
: /* rx == rx */
4400 case OPC_BEQL
: /* rx == rx likely */
4401 case OPC_BGEZ
: /* 0 >= 0 */
4402 case OPC_BGEZL
: /* 0 >= 0 likely */
4403 case OPC_BLEZ
: /* 0 <= 0 */
4404 case OPC_BLEZL
: /* 0 <= 0 likely */
4406 ctx
->hflags
|= MIPS_HFLAG_B
;
4408 case OPC_BGEZAL
: /* 0 >= 0 */
4409 case OPC_BGEZALL
: /* 0 >= 0 likely */
4410 /* Always take and link */
4412 ctx
->hflags
|= MIPS_HFLAG_B
;
4414 case OPC_BNE
: /* rx != rx */
4415 case OPC_BGTZ
: /* 0 > 0 */
4416 case OPC_BLTZ
: /* 0 < 0 */
4419 case OPC_BLTZAL
: /* 0 < 0 */
4420 /* Handle as an unconditional branch to get correct delay
4423 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4424 ctx
->hflags
|= MIPS_HFLAG_B
;
4426 case OPC_BLTZALL
: /* 0 < 0 likely */
4427 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4428 /* Skip the instruction in the delay slot */
4431 case OPC_BNEL
: /* rx != rx likely */
4432 case OPC_BGTZL
: /* 0 > 0 likely */
4433 case OPC_BLTZL
: /* 0 < 0 likely */
4434 /* Skip the instruction in the delay slot */
4438 ctx
->hflags
|= MIPS_HFLAG_B
;
4441 ctx
->hflags
|= MIPS_HFLAG_BX
;
4445 ctx
->hflags
|= MIPS_HFLAG_B
;
4448 ctx
->hflags
|= MIPS_HFLAG_BR
;
4452 ctx
->hflags
|= MIPS_HFLAG_BR
;
4455 MIPS_INVAL("branch/jump");
4456 generate_exception_end(ctx
, EXCP_RI
);
4462 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4465 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4468 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4471 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4474 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4477 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4480 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4484 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4488 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4491 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4494 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4497 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4500 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4503 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4506 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4508 #if defined(TARGET_MIPS64)
4510 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4514 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4517 ctx
->hflags
|= MIPS_HFLAG_BC
;
4520 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4523 ctx
->hflags
|= MIPS_HFLAG_BL
;
4526 MIPS_INVAL("conditional branch/jump");
4527 generate_exception_end(ctx
, EXCP_RI
);
4532 ctx
->btarget
= btgt
;
4534 switch (delayslot_size
) {
4536 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4539 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4544 int post_delay
= insn_bytes
+ delayslot_size
;
4545 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4547 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4551 if (insn_bytes
== 2)
4552 ctx
->hflags
|= MIPS_HFLAG_B16
;
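/*
 * Editor's note on the hflags set by gen_compute_branch() above, as far
 * as can be read from this code: MIPS_HFLAG_B marks an unconditionally
 * taken branch, MIPS_HFLAG_BC a conditional branch whose condition has
 * been left in `bcond`, MIPS_HFLAG_BL a branch-likely (the delay slot is
 * nullified when not taken), MIPS_HFLAG_BR a jump through a register
 * (target in `btarget`), and MIPS_HFLAG_BX a jump that also toggles the
 * ISA mode (JALX).  MIPS_HFLAG_BDS16/BDS32 record the size of the delay
 * slot instruction, and MIPS_HFLAG_B16 appears to record that the branch
 * itself was a 16-bit instruction.  The delay slot instruction is
 * translated next; the branch is resolved after it.
 */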
4557 /* special3 bitfield operations */
4558 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4559 int rs
, int lsb
, int msb
)
4561 TCGv t0
= tcg_temp_new();
4562 TCGv t1
= tcg_temp_new();
4564 gen_load_gpr(t1
, rs
);
4567 if (lsb
+ msb
> 31) {
4571 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4573 /* The two checks together imply that lsb == 0,
4574 so this is a simple sign-extension. */
4575 tcg_gen_ext32s_tl(t0
, t1
);
4578 #if defined(TARGET_MIPS64)
4587 if (lsb
+ msb
> 63) {
4590 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4597 gen_load_gpr(t0
, rt
);
4598 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4599 tcg_gen_ext32s_tl(t0
, t0
);
4601 #if defined(TARGET_MIPS64)
4612 gen_load_gpr(t0
, rt
);
4613 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4618 MIPS_INVAL("bitops");
4619 generate_exception_end(ctx
, EXCP_RI
);
4624 gen_store_gpr(t0
, rt
);
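/*
 * Editor's note: a plain C model of the 32-bit EXT/INS bitfield
 * operations handled above, to make the lsb/msb encoding explicit
 * (the msb field encodes size - 1 for EXT and pos + size - 1 for INS,
 * matching the tcg_gen_extract_tl/tcg_gen_deposit_tl calls).  These
 * models are illustrative only and are not used by the translator.
 */
static inline uint32_t mips_ext32_model(uint32_t rs, int pos, int size)
{
    uint32_t mask = (size == 32) ? 0xffffffffu : ((1u << size) - 1);
    return (rs >> pos) & mask;
}

static inline uint32_t mips_ins32_model(uint32_t rt, uint32_t rs,
                                        int pos, int size)
{
    uint32_t mask = ((size == 32) ? 0xffffffffu : ((1u << size) - 1)) << pos;
    return (rt & ~mask) | ((rs << pos) & mask);
}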
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bshfl");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
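/*
 * Editor's note: WSBH swaps the two bytes inside each halfword, which the
 * translator implements with the mask-and-shift sequence above.  The same
 * computation in plain C, for reference only (not used by the translator):
 */
static inline uint32_t wsbh_model(uint32_t x)
{
    return ((x >> 8) & 0x00ff00ffu) | ((x & 0x00ff00ffu) << 8);
}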
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    TCGv t0;
    TCGv t1;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
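/*
 * Editor's note: LSA/DLSA compute rd = (rs << (imm2 + 1)) + rt, i.e. the
 * two-bit immediate encodes shift amounts 1..4.  A plain C model of the
 * 32-bit form, for reference only (not used by the translator):
 */
static inline int32_t lsa_model(int32_t rs, int32_t rt, int imm2)
{
    /* Unsigned arithmetic avoids C-level overflow UB; the result wraps. */
    return (int32_t)(((uint32_t)rs << (imm2 + 1)) + (uint32_t)rt);
}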
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
{
    TCGv t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    if (bp == 0) {
        switch (opc) {
        case OPC_ALIGN:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DALIGN:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
        switch (opc) {
        case OPC_ALIGN:
            {
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case OPC_DALIGN:
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }

    tcg_temp_free(t0);
}
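/*
 * Editor's note: for a byte position bp in 1..3, the ALIGN path above
 * concatenates rt:rs into a 64-bit value and takes 32 bits starting
 * 8*(4-bp) bits up, i.e. rd = (rt << 8*bp) | (rs >> 8*(4-bp)); bp == 0
 * simply copies rt.  Plain C model, for reference only:
 */
static inline uint32_t align_model(uint32_t rs, uint32_t rt, int bp)
{
    /* bp must be 1..3 here; bp == 0 is handled as a plain move. */
    uint64_t cat = ((uint64_t)rt << 32) | rs;
    return (uint32_t)(cat >> (8 * (4 - bp)));
}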
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
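/*
 * Editor's note: BITSWAP/DBITSWAP reverse the bit order inside each byte
 * of the operand (helper_bitswap/helper_dbitswap do the real work).  A
 * plain C model for a single byte, for reference only:
 */
static inline uint8_t bitswap_byte_model(uint8_t b)
{
    b = (uint8_t)(((b & 0x55) << 1) | ((b >> 1) & 0x55));
    b = (uint8_t)(((b & 0x33) << 2) | ((b >> 2) & 0x33));
    b = (uint8_t)(((b & 0x0f) << 4) | ((b >> 4) & 0x0f));
    return b;
}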
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
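/*
 * Editor's note: the MFHC0/MTHC0 helpers above treat a 64-bit CP0 field
 * as two 32-bit halves: gen_mfhc0_load64() shifts the stored value right
 * by 32 (plus an implementation-specific shift) and hands back the low
 * half, while gen_mthc0_store64() rebuilds the register with
 * tcg_gen_concat32_i64() so the guest-visible low half is preserved.
 * Plain C model of the store path, for reference only:
 */
static inline uint64_t mthc0_store64_model(uint64_t old, uint32_t new_hi)
{
    return ((uint64_t)new_hi << 32) | (uint32_t)old;
}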
4879 #define CP0_CHECK(c) \
4882 goto cp0_unimplemented; \
4886 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4888 const char *rn
= "invalid";
4890 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4896 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4900 goto cp0_unimplemented
;
4906 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4910 goto cp0_unimplemented
;
4916 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4917 ctx
->CP0_LLAddr_shift
);
4921 CP0_CHECK(ctx
->mrp
);
4922 gen_helper_mfhc0_maar(arg
, cpu_env
);
4926 goto cp0_unimplemented
;
4935 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4939 goto cp0_unimplemented
;
4943 goto cp0_unimplemented
;
4945 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4949 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4950 tcg_gen_movi_tl(arg
, 0);
4953 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4955 const char *rn
= "invalid";
4956 uint64_t mask
= ctx
->PAMask
>> 36;
4958 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4964 tcg_gen_andi_tl(arg
, arg
, mask
);
4965 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4969 goto cp0_unimplemented
;
4975 tcg_gen_andi_tl(arg
, arg
, mask
);
4976 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4980 goto cp0_unimplemented
;
4986 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4987 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4988 relevant for modern MIPS cores supporting MTHC0, therefore
4989 treating MTHC0 to LLAddr as NOP. */
4993 CP0_CHECK(ctx
->mrp
);
4994 gen_helper_mthc0_maar(cpu_env
, arg
);
4998 goto cp0_unimplemented
;
5007 tcg_gen_andi_tl(arg
, arg
, mask
);
5008 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5012 goto cp0_unimplemented
;
5016 goto cp0_unimplemented
;
5018 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5021 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5024 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5026 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5027 tcg_gen_movi_tl(arg
, 0);
5029 tcg_gen_movi_tl(arg
, ~0);
5033 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5035 const char *rn
= "invalid";
5038 check_insn(ctx
, ISA_MIPS32
);
5044 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5049 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5053 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5054 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5058 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5059 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5064 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5068 goto cp0_unimplemented
;
5074 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5075 gen_helper_mfc0_random(arg
, cpu_env
);
5079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5080 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5085 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5095 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5100 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5105 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5106 rn
= "VPEScheFBack";
5109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5110 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5114 goto cp0_unimplemented
;
5121 TCGv_i64 tmp
= tcg_temp_new_i64();
5122 tcg_gen_ld_i64(tmp
, cpu_env
,
5123 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5124 #if defined(TARGET_MIPS64)
5126 /* Move RI/XI fields to bits 31:30 */
5127 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5128 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5131 gen_move_low32(arg
, tmp
);
5132 tcg_temp_free_i64(tmp
);
5137 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5138 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5142 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5143 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5147 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5148 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5152 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5153 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5157 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5158 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5163 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5168 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5172 goto cp0_unimplemented
;
5179 TCGv_i64 tmp
= tcg_temp_new_i64();
5180 tcg_gen_ld_i64(tmp
, cpu_env
,
5181 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5182 #if defined(TARGET_MIPS64)
5184 /* Move RI/XI fields to bits 31:30 */
5185 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5186 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5189 gen_move_low32(arg
, tmp
);
5190 tcg_temp_free_i64(tmp
);
5196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5197 rn
= "GlobalNumber";
5200 goto cp0_unimplemented
;
5206 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5207 tcg_gen_ext32s_tl(arg
, arg
);
5211 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5212 rn
= "ContextConfig";
5213 goto cp0_unimplemented
;
5215 CP0_CHECK(ctx
->ulri
);
5216 tcg_gen_ld_tl(arg
, cpu_env
,
5217 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5218 tcg_gen_ext32s_tl(arg
, arg
);
5222 goto cp0_unimplemented
;
5228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5232 check_insn(ctx
, ISA_MIPS32R2
);
5233 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5238 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5239 tcg_gen_ext32s_tl(arg
, arg
);
5244 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5245 tcg_gen_ext32s_tl(arg
, arg
);
5250 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5251 tcg_gen_ext32s_tl(arg
, arg
);
5255 goto cp0_unimplemented
;
5261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5265 check_insn(ctx
, ISA_MIPS32R2
);
5266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5270 check_insn(ctx
, ISA_MIPS32R2
);
5271 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5275 check_insn(ctx
, ISA_MIPS32R2
);
5276 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5280 check_insn(ctx
, ISA_MIPS32R2
);
5281 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5285 check_insn(ctx
, ISA_MIPS32R2
);
5286 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5290 goto cp0_unimplemented
;
5296 check_insn(ctx
, ISA_MIPS32R2
);
5297 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5301 goto cp0_unimplemented
;
5307 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5308 tcg_gen_ext32s_tl(arg
, arg
);
5313 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5318 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5322 goto cp0_unimplemented
;
5328 /* Mark as an IO operation because we read the time. */
5329 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
5332 gen_helper_mfc0_count(arg
, cpu_env
);
5333 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
5336 /* Break the TB to be able to take timer interrupts immediately
5337 after reading count. BS_STOP isn't sufficient, we need to ensure
5338 we break completely out of translated code. */
5339 gen_save_pc(ctx
->pc
+ 4);
5340 ctx
->bstate
= BS_EXCP
;
5343 /* 6,7 are implementation dependent */
5345 goto cp0_unimplemented
;
5351 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5352 tcg_gen_ext32s_tl(arg
, arg
);
5356 goto cp0_unimplemented
;
5362 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5365 /* 6,7 are implementation dependent */
5367 goto cp0_unimplemented
;
5373 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5377 check_insn(ctx
, ISA_MIPS32R2
);
5378 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5382 check_insn(ctx
, ISA_MIPS32R2
);
5383 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5387 check_insn(ctx
, ISA_MIPS32R2
);
5388 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5392 goto cp0_unimplemented
;
5398 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5402 goto cp0_unimplemented
;
5408 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5409 tcg_gen_ext32s_tl(arg
, arg
);
5413 goto cp0_unimplemented
;
5419 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5423 check_insn(ctx
, ISA_MIPS32R2
);
5424 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5425 tcg_gen_ext32s_tl(arg
, arg
);
5429 check_insn(ctx
, ISA_MIPS32R2
);
5430 CP0_CHECK(ctx
->cmgcr
);
5431 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5432 tcg_gen_ext32s_tl(arg
, arg
);
5436 goto cp0_unimplemented
;
5442 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5446 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5454 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5458 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5462 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5465 /* 6,7 are implementation dependent */
5467 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5475 goto cp0_unimplemented
;
5481 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5485 CP0_CHECK(ctx
->mrp
);
5486 gen_helper_mfc0_maar(arg
, cpu_env
);
5490 CP0_CHECK(ctx
->mrp
);
5491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5495 goto cp0_unimplemented
;
5501 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5505 goto cp0_unimplemented
;
5511 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5515 goto cp0_unimplemented
;
5521 #if defined(TARGET_MIPS64)
5522 check_insn(ctx
, ISA_MIPS3
);
5523 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5524 tcg_gen_ext32s_tl(arg
, arg
);
5529 goto cp0_unimplemented
;
5533 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5534 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5541 goto cp0_unimplemented
;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
5551 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5555 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5556 rn
= "TraceControl";
5557 goto cp0_unimplemented
;
5559 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5560 rn
= "TraceControl2";
5561 goto cp0_unimplemented
;
5563 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5564 rn
= "UserTraceData";
5565 goto cp0_unimplemented
;
5567 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5569 goto cp0_unimplemented
;
5571 goto cp0_unimplemented
;
5578 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5579 tcg_gen_ext32s_tl(arg
, arg
);
5583 goto cp0_unimplemented
;
5589 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5590 rn
= "Performance0";
5593 // gen_helper_mfc0_performance1(arg);
5594 rn
= "Performance1";
5595 goto cp0_unimplemented
;
5597 // gen_helper_mfc0_performance2(arg);
5598 rn
= "Performance2";
5599 goto cp0_unimplemented
;
5601 // gen_helper_mfc0_performance3(arg);
5602 rn
= "Performance3";
5603 goto cp0_unimplemented
;
5605 // gen_helper_mfc0_performance4(arg);
5606 rn
= "Performance4";
5607 goto cp0_unimplemented
;
5609 // gen_helper_mfc0_performance5(arg);
5610 rn
= "Performance5";
5611 goto cp0_unimplemented
;
5613 // gen_helper_mfc0_performance6(arg);
5614 rn
= "Performance6";
5615 goto cp0_unimplemented
;
5617 // gen_helper_mfc0_performance7(arg);
5618 rn
= "Performance7";
5619 goto cp0_unimplemented
;
5621 goto cp0_unimplemented
;
5627 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5631 goto cp0_unimplemented
;
5637 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5641 goto cp0_unimplemented
;
5651 TCGv_i64 tmp
= tcg_temp_new_i64();
5652 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5653 gen_move_low32(arg
, tmp
);
5654 tcg_temp_free_i64(tmp
);
5662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5666 goto cp0_unimplemented
;
5675 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5682 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5686 goto cp0_unimplemented
;
5692 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5693 tcg_gen_ext32s_tl(arg
, arg
);
5697 goto cp0_unimplemented
;
5704 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5708 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5709 tcg_gen_ld_tl(arg
, cpu_env
,
5710 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5711 tcg_gen_ext32s_tl(arg
, arg
);
5715 goto cp0_unimplemented
;
5719 goto cp0_unimplemented
;
5721 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5725 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5726 gen_mfc0_unimplemented(ctx
, arg
);
5729 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5731 const char *rn
= "invalid";
5734 check_insn(ctx
, ISA_MIPS32
);
5736 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
5744 gen_helper_mtc0_index(cpu_env
, arg
);
5748 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5749 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5753 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5758 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5768 goto cp0_unimplemented
;
5778 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5779 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5783 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5784 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5788 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5789 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5793 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5794 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5798 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5799 tcg_gen_st_tl(arg
, cpu_env
,
5800 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5804 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5805 tcg_gen_st_tl(arg
, cpu_env
,
5806 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5807 rn
= "VPEScheFBack";
5810 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5811 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5815 goto cp0_unimplemented
;
5821 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5825 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5826 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5830 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5831 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5835 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5836 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5840 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5841 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5845 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5846 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5850 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5851 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5855 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5856 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5860 goto cp0_unimplemented
;
5866 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5872 rn
= "GlobalNumber";
5875 goto cp0_unimplemented
;
5881 gen_helper_mtc0_context(cpu_env
, arg
);
5885 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5886 rn
= "ContextConfig";
5887 goto cp0_unimplemented
;
5889 CP0_CHECK(ctx
->ulri
);
5890 tcg_gen_st_tl(arg
, cpu_env
,
5891 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5895 goto cp0_unimplemented
;
5901 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5905 check_insn(ctx
, ISA_MIPS32R2
);
5906 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5908 ctx
->bstate
= BS_STOP
;
5912 gen_helper_mtc0_segctl0(cpu_env
, arg
);
5917 gen_helper_mtc0_segctl1(cpu_env
, arg
);
5922 gen_helper_mtc0_segctl2(cpu_env
, arg
);
5926 goto cp0_unimplemented
;
5932 gen_helper_mtc0_wired(cpu_env
, arg
);
5936 check_insn(ctx
, ISA_MIPS32R2
);
5937 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5941 check_insn(ctx
, ISA_MIPS32R2
);
5942 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5946 check_insn(ctx
, ISA_MIPS32R2
);
5947 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5951 check_insn(ctx
, ISA_MIPS32R2
);
5952 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5956 check_insn(ctx
, ISA_MIPS32R2
);
5957 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5961 goto cp0_unimplemented
;
5967 check_insn(ctx
, ISA_MIPS32R2
);
5968 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5969 ctx
->bstate
= BS_STOP
;
5973 goto cp0_unimplemented
;
5991 goto cp0_unimplemented
;
5997 gen_helper_mtc0_count(cpu_env
, arg
);
6000 /* 6,7 are implementation dependent */
6002 goto cp0_unimplemented
;
6008 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6012 goto cp0_unimplemented
;
6018 gen_helper_mtc0_compare(cpu_env
, arg
);
6021 /* 6,7 are implementation dependent */
6023 goto cp0_unimplemented
;
6029 save_cpu_state(ctx
, 1);
6030 gen_helper_mtc0_status(cpu_env
, arg
);
6031 /* BS_STOP isn't good enough here, hflags may have changed. */
6032 gen_save_pc(ctx
->pc
+ 4);
6033 ctx
->bstate
= BS_EXCP
;
6037 check_insn(ctx
, ISA_MIPS32R2
);
6038 gen_helper_mtc0_intctl(cpu_env
, arg
);
6039 /* Stop translation as we may have switched the execution mode */
6040 ctx
->bstate
= BS_STOP
;
6044 check_insn(ctx
, ISA_MIPS32R2
);
6045 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6046 /* Stop translation as we may have switched the execution mode */
6047 ctx
->bstate
= BS_STOP
;
6051 check_insn(ctx
, ISA_MIPS32R2
);
6052 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6053 /* Stop translation as we may have switched the execution mode */
6054 ctx
->bstate
= BS_STOP
;
6058 goto cp0_unimplemented
;
6064 save_cpu_state(ctx
, 1);
6065 gen_helper_mtc0_cause(cpu_env
, arg
);
6066 /* Stop translation as we may have triggered an interrupt. BS_STOP
6067 * isn't sufficient, we need to ensure we break out of translated
6068 * code to check for pending interrupts. */
6069 gen_save_pc(ctx
->pc
+ 4);
6070 ctx
->bstate
= BS_EXCP
;
6074 goto cp0_unimplemented
;
6080 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6084 goto cp0_unimplemented
;
6094 check_insn(ctx
, ISA_MIPS32R2
);
6095 gen_helper_mtc0_ebase(cpu_env
, arg
);
6099 goto cp0_unimplemented
;
6105 gen_helper_mtc0_config0(cpu_env
, arg
);
6107 /* Stop translation as we may have switched the execution mode */
6108 ctx
->bstate
= BS_STOP
;
6111 /* ignored, read only */
6115 gen_helper_mtc0_config2(cpu_env
, arg
);
6117 /* Stop translation as we may have switched the execution mode */
6118 ctx
->bstate
= BS_STOP
;
6121 gen_helper_mtc0_config3(cpu_env
, arg
);
6123 /* Stop translation as we may have switched the execution mode */
6124 ctx
->bstate
= BS_STOP
;
6127 gen_helper_mtc0_config4(cpu_env
, arg
);
6129 ctx
->bstate
= BS_STOP
;
6132 gen_helper_mtc0_config5(cpu_env
, arg
);
6134 /* Stop translation as we may have switched the execution mode */
6135 ctx
->bstate
= BS_STOP
;
6137 /* 6,7 are implementation dependent */
6147 rn
= "Invalid config selector";
6148 goto cp0_unimplemented
;
6154 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6158 CP0_CHECK(ctx
->mrp
);
6159 gen_helper_mtc0_maar(cpu_env
, arg
);
6163 CP0_CHECK(ctx
->mrp
);
6164 gen_helper_mtc0_maari(cpu_env
, arg
);
6168 goto cp0_unimplemented
;
6174 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6178 goto cp0_unimplemented
;
6184 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6188 goto cp0_unimplemented
;
6194 #if defined(TARGET_MIPS64)
6195 check_insn(ctx
, ISA_MIPS3
);
6196 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6201 goto cp0_unimplemented
;
6205 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6206 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6209 gen_helper_mtc0_framemask(cpu_env
, arg
);
6213 goto cp0_unimplemented
;
6218 rn
= "Diagnostic"; /* implementation dependent */
6223 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6224 /* BS_STOP isn't good enough here, hflags may have changed. */
6225 gen_save_pc(ctx
->pc
+ 4);
6226 ctx
->bstate
= BS_EXCP
;
6230 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6231 rn
= "TraceControl";
6232 /* Stop translation as we may have switched the execution mode */
6233 ctx
->bstate
= BS_STOP
;
6234 goto cp0_unimplemented
;
6236 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6237 rn
= "TraceControl2";
6238 /* Stop translation as we may have switched the execution mode */
6239 ctx
->bstate
= BS_STOP
;
6240 goto cp0_unimplemented
;
6242 /* Stop translation as we may have switched the execution mode */
6243 ctx
->bstate
= BS_STOP
;
6244 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6245 rn
= "UserTraceData";
6246 /* Stop translation as we may have switched the execution mode */
6247 ctx
->bstate
= BS_STOP
;
6248 goto cp0_unimplemented
;
6250 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6251 /* Stop translation as we may have switched the execution mode */
6252 ctx
->bstate
= BS_STOP
;
6254 goto cp0_unimplemented
;
6256 goto cp0_unimplemented
;
6263 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6267 goto cp0_unimplemented
;
6273 gen_helper_mtc0_performance0(cpu_env
, arg
);
6274 rn
= "Performance0";
6277 // gen_helper_mtc0_performance1(arg);
6278 rn
= "Performance1";
6279 goto cp0_unimplemented
;
6281 // gen_helper_mtc0_performance2(arg);
6282 rn
= "Performance2";
6283 goto cp0_unimplemented
;
6285 // gen_helper_mtc0_performance3(arg);
6286 rn
= "Performance3";
6287 goto cp0_unimplemented
;
6289 // gen_helper_mtc0_performance4(arg);
6290 rn
= "Performance4";
6291 goto cp0_unimplemented
;
6293 // gen_helper_mtc0_performance5(arg);
6294 rn
= "Performance5";
6295 goto cp0_unimplemented
;
6297 // gen_helper_mtc0_performance6(arg);
6298 rn
= "Performance6";
6299 goto cp0_unimplemented
;
6301 // gen_helper_mtc0_performance7(arg);
6302 rn
= "Performance7";
6303 goto cp0_unimplemented
;
6305 goto cp0_unimplemented
;
6311 gen_helper_mtc0_errctl(cpu_env
, arg
);
6312 ctx
->bstate
= BS_STOP
;
6316 goto cp0_unimplemented
;
6326 goto cp0_unimplemented
;
6335 gen_helper_mtc0_taglo(cpu_env
, arg
);
6342 gen_helper_mtc0_datalo(cpu_env
, arg
);
6346 goto cp0_unimplemented
;
6355 gen_helper_mtc0_taghi(cpu_env
, arg
);
6362 gen_helper_mtc0_datahi(cpu_env
, arg
);
6367 goto cp0_unimplemented
;
6373 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6377 goto cp0_unimplemented
;
6384 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6388 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6389 tcg_gen_st_tl(arg
, cpu_env
,
6390 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6394 goto cp0_unimplemented
;
6398 goto cp0_unimplemented
;
6400 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6402 /* For simplicity assume that all writes can cause interrupts. */
6403 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
6405 /* BS_STOP isn't sufficient, we need to ensure we break out of
6406 * translated code to check for pending interrupts. */
6407 gen_save_pc(ctx
->pc
+ 4);
6408 ctx
->bstate
= BS_EXCP
;
6413 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6416 #if defined(TARGET_MIPS64)
6417 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6419 const char *rn
= "invalid";
6422 check_insn(ctx
, ISA_MIPS64
);
6428 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6432 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6433 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6437 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6438 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6442 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6443 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6448 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6452 goto cp0_unimplemented
;
6458 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6459 gen_helper_mfc0_random(arg
, cpu_env
);
6463 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6464 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6468 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6473 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6474 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6478 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6479 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6483 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6484 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6488 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6489 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6490 rn
= "VPEScheFBack";
6493 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6494 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6498 goto cp0_unimplemented
;
6504 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6508 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6509 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6513 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6514 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6518 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6519 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6523 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6524 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6528 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6529 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6533 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6534 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6538 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6539 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6543 goto cp0_unimplemented
;
6549 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6555 rn
= "GlobalNumber";
6558 goto cp0_unimplemented
;
6564 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6568 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6569 rn
= "ContextConfig";
6570 goto cp0_unimplemented
;
6572 CP0_CHECK(ctx
->ulri
);
6573 tcg_gen_ld_tl(arg
, cpu_env
,
6574 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6578 goto cp0_unimplemented
;
6584 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6588 check_insn(ctx
, ISA_MIPS32R2
);
6589 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6594 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6599 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6604 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6608 goto cp0_unimplemented
;
6614 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6618 check_insn(ctx
, ISA_MIPS32R2
);
6619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6623 check_insn(ctx
, ISA_MIPS32R2
);
6624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6628 check_insn(ctx
, ISA_MIPS32R2
);
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6633 check_insn(ctx
, ISA_MIPS32R2
);
6634 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6638 check_insn(ctx
, ISA_MIPS32R2
);
6639 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6643 goto cp0_unimplemented
;
6649 check_insn(ctx
, ISA_MIPS32R2
);
6650 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6654 goto cp0_unimplemented
;
6660 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6674 goto cp0_unimplemented
;
6680 /* Mark as an IO operation because we read the time. */
6681 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
6684 gen_helper_mfc0_count(arg
, cpu_env
);
6685 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
6688 /* Break the TB to be able to take timer interrupts immediately
6689 after reading count. BS_STOP isn't sufficient, we need to ensure
6690 we break completely out of translated code. */
6691 gen_save_pc(ctx
->pc
+ 4);
6692 ctx
->bstate
= BS_EXCP
;
6695 /* 6,7 are implementation dependent */
6697 goto cp0_unimplemented
;
6703 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6707 goto cp0_unimplemented
;
6713 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6716 /* 6,7 are implementation dependent */
6718 goto cp0_unimplemented
;
6724 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6728 check_insn(ctx
, ISA_MIPS32R2
);
6729 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6733 check_insn(ctx
, ISA_MIPS32R2
);
6734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6738 check_insn(ctx
, ISA_MIPS32R2
);
6739 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6743 goto cp0_unimplemented
;
6749 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6753 goto cp0_unimplemented
;
6759 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6763 goto cp0_unimplemented
;
6769 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6773 check_insn(ctx
, ISA_MIPS32R2
);
6774 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6778 check_insn(ctx
, ISA_MIPS32R2
);
6779 CP0_CHECK(ctx
->cmgcr
);
6780 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6784 goto cp0_unimplemented
;
6790 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6794 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6802 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6806 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6810 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6813 /* 6,7 are implementation dependent */
6815 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6819 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6823 goto cp0_unimplemented
;
6829 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6833 CP0_CHECK(ctx
->mrp
);
6834 gen_helper_dmfc0_maar(arg
, cpu_env
);
6838 CP0_CHECK(ctx
->mrp
);
6839 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6843 goto cp0_unimplemented
;
6849 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6853 goto cp0_unimplemented
;
6859 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6863 goto cp0_unimplemented
;
6869 check_insn(ctx
, ISA_MIPS3
);
6870 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6874 goto cp0_unimplemented
;
6878 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6879 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6882 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6886 goto cp0_unimplemented
;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
6896 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6900 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6901 rn
= "TraceControl";
6902 goto cp0_unimplemented
;
6904 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6905 rn
= "TraceControl2";
6906 goto cp0_unimplemented
;
6908 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6909 rn
= "UserTraceData";
6910 goto cp0_unimplemented
;
6912 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6914 goto cp0_unimplemented
;
6916 goto cp0_unimplemented
;
6923 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6927 goto cp0_unimplemented
;
6933 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6934 rn
= "Performance0";
6937 // gen_helper_dmfc0_performance1(arg);
6938 rn
= "Performance1";
6939 goto cp0_unimplemented
;
6941 // gen_helper_dmfc0_performance2(arg);
6942 rn
= "Performance2";
6943 goto cp0_unimplemented
;
6945 // gen_helper_dmfc0_performance3(arg);
6946 rn
= "Performance3";
6947 goto cp0_unimplemented
;
6949 // gen_helper_dmfc0_performance4(arg);
6950 rn
= "Performance4";
6951 goto cp0_unimplemented
;
6953 // gen_helper_dmfc0_performance5(arg);
6954 rn
= "Performance5";
6955 goto cp0_unimplemented
;
6957 // gen_helper_dmfc0_performance6(arg);
6958 rn
= "Performance6";
6959 goto cp0_unimplemented
;
6961 // gen_helper_dmfc0_performance7(arg);
6962 rn
= "Performance7";
6963 goto cp0_unimplemented
;
6965 goto cp0_unimplemented
;
6971 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6975 goto cp0_unimplemented
;
6982 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6986 goto cp0_unimplemented
;
6995 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7006 goto cp0_unimplemented
;
7015 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7022 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7026 goto cp0_unimplemented
;
7032 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7036 goto cp0_unimplemented
;
7043 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7047 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7048 tcg_gen_ld_tl(arg
, cpu_env
,
7049 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7053 goto cp0_unimplemented
;
7057 goto cp0_unimplemented
;
7059 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
7063 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7064 gen_mfc0_unimplemented(ctx
, arg
);
7067 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7069 const char *rn
= "invalid";
7072 check_insn(ctx
, ISA_MIPS64
);
7074 if (tb_cflags(ctx
->tb
) & CF_USE_ICOUNT
) {
7082 gen_helper_mtc0_index(cpu_env
, arg
);
7086 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7087 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7091 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7096 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7106 goto cp0_unimplemented
;
7116 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7117 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7121 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7122 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7126 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7127 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7131 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7132 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7136 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7137 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7141 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7142 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7143 rn
= "VPEScheFBack";
7146 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7147 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7151 goto cp0_unimplemented
;
7157 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7161 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7162 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7166 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7167 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7171 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7172 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7176 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7177 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7181 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7182 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7186 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7187 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7191 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7192 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7196 goto cp0_unimplemented
;
7202 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7208 rn
= "GlobalNumber";
7211 goto cp0_unimplemented
;
7217 gen_helper_mtc0_context(cpu_env
, arg
);
7221 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7222 rn
= "ContextConfig";
7223 goto cp0_unimplemented
;
7225 CP0_CHECK(ctx
->ulri
);
7226 tcg_gen_st_tl(arg
, cpu_env
,
7227 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7231 goto cp0_unimplemented
;
7237 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7241 check_insn(ctx
, ISA_MIPS32R2
);
7242 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7247 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7252 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7257 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7261 goto cp0_unimplemented
;
7267 gen_helper_mtc0_wired(cpu_env
, arg
);
7271 check_insn(ctx
, ISA_MIPS32R2
);
7272 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7276 check_insn(ctx
, ISA_MIPS32R2
);
7277 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7281 check_insn(ctx
, ISA_MIPS32R2
);
7282 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7286 check_insn(ctx
, ISA_MIPS32R2
);
7287 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7291 check_insn(ctx
, ISA_MIPS32R2
);
7292 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7296 goto cp0_unimplemented
;
7302 check_insn(ctx
, ISA_MIPS32R2
);
7303 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7304 ctx
->bstate
= BS_STOP
;
7308 goto cp0_unimplemented
;
7326 goto cp0_unimplemented
;
7332 gen_helper_mtc0_count(cpu_env
, arg
);
7335 /* 6,7 are implementation dependent */
7337 goto cp0_unimplemented
;
7339 /* Stop translation as we may have switched the execution mode */
7340 ctx
->bstate
= BS_STOP
;
7345 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7349 goto cp0_unimplemented
;
7355 gen_helper_mtc0_compare(cpu_env
, arg
);
7358 /* 6,7 are implementation dependent */
7360 goto cp0_unimplemented
;
7362 /* Stop translation as we may have switched the execution mode */
7363 ctx
->bstate
= BS_STOP
;
7368 save_cpu_state(ctx
, 1);
7369 gen_helper_mtc0_status(cpu_env
, arg
);
7370 /* BS_STOP isn't good enough here, hflags may have changed. */
7371 gen_save_pc(ctx
->pc
+ 4);
7372 ctx
->bstate
= BS_EXCP
;
7376 check_insn(ctx
, ISA_MIPS32R2
);
7377 gen_helper_mtc0_intctl(cpu_env
, arg
);
7378 /* Stop translation as we may have switched the execution mode */
7379 ctx
->bstate
= BS_STOP
;
7383 check_insn(ctx
, ISA_MIPS32R2
);
7384 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7385 /* Stop translation as we may have switched the execution mode */
7386 ctx
->bstate
= BS_STOP
;
7390 check_insn(ctx
, ISA_MIPS32R2
);
7391 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7392 /* Stop translation as we may have switched the execution mode */
7393 ctx
->bstate
= BS_STOP
;
7397 goto cp0_unimplemented
;
7403 save_cpu_state(ctx
, 1);
7404 gen_helper_mtc0_cause(cpu_env
, arg
);
            /* Stop translation as we may have triggered an interrupt. BS_STOP
             * isn't sufficient, we need to ensure we break out of translated
             * code to check for pending interrupts. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_ebase(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_config0(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* ignored, read only */
        gen_helper_mtc0_config2(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        gen_helper_mtc0_config3(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* currently ignored */
        gen_helper_mtc0_config5(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
        rn = "Invalid config selector";
        goto cp0_unimplemented;
        gen_helper_mtc0_lladdr(cpu_env, arg);
        CP0_CHECK(ctx->mrp);
        gen_helper_mtc0_maar(cpu_env, arg);
        CP0_CHECK(ctx->mrp);
        gen_helper_mtc0_maari(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_0e1i(mtc0_watchlo, arg, sel);
        goto cp0_unimplemented;
        gen_helper_0e1i(mtc0_watchhi, arg, sel);
        goto cp0_unimplemented;
        check_insn(ctx, ISA_MIPS3);
        gen_helper_mtc0_xcontext(cpu_env, arg);
        goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
        gen_helper_mtc0_framemask(cpu_env, arg);
        goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
        gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
        /* BS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
        // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        rn = "TraceControl";
        goto cp0_unimplemented;
        // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        rn = "TraceControl2";
        goto cp0_unimplemented;
        // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        rn = "UserTraceData";
        goto cp0_unimplemented;
        // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
        goto cp0_unimplemented;
        gen_helper_mtc0_performance0(cpu_env, arg);
        rn = "Performance0";
        // gen_helper_mtc0_performance1(cpu_env, arg);
        rn = "Performance1";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance2(cpu_env, arg);
        rn = "Performance2";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance3(cpu_env, arg);
        rn = "Performance3";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance4(cpu_env, arg);
        rn = "Performance4";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance5(cpu_env, arg);
        rn = "Performance5";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance6(cpu_env, arg);
        rn = "Performance6";
        goto cp0_unimplemented;
        // gen_helper_mtc0_performance7(cpu_env, arg);
        rn = "Performance7";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_helper_mtc0_errctl(cpu_env, arg);
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
        gen_helper_mtc0_taglo(cpu_env, arg);
        gen_helper_mtc0_datalo(cpu_env, arg);
        goto cp0_unimplemented;
        gen_helper_mtc0_taghi(cpu_env, arg);
        gen_helper_mtc0_datahi(cpu_env, arg);
        goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
        goto cp0_unimplemented;
        gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
        CP0_CHECK(ctx->kscrexist & (1 << sel));
        tcg_gen_st_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("dmtc0", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
        /* BS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts.  */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
    qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
#endif /* TARGET_MIPS64 */
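/*
 * MT ASE: MFTR copies a register that belongs to another thread context
 * (TC) into a GPR of the current TC.  As the code below shows, when the
 * target TC is not reachable (different VPE without MVP capability, or a
 * TC number above MVPConf0.PTC) the result is simply -1 instead of a trap.
 */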
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
        gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    trace_mips_translate_tr("mftr", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
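/*
 * MT ASE: MTTR is the store counterpart of MFTR - it moves a GPR of the
 * current TC into a register of the TC selected by CP0_VPEControl.TargTC.
 * The same accessibility checks as in gen_mftr() guard the write.
 */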
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
        gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    trace_mips_translate_tr("mttr", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
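/*
 * gen_cp0() dispatches the privileged COP0 opcodes: MFC0/MTC0 (plus the
 * 64-bit and high-half variants), the MT ASE MFTR/MTTR moves, the TLB
 * maintenance instructions, ERET/ERETNC, DERET and WAIT.
 */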
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
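/*
 * The pre-R6 CP1 branches (BC1F/BC1T, their likely forms and the
 * BC1ANY2/BC1ANY4 variants) test one or more FCR31 condition-code bits,
 * latch the result in 'bcond' and record the branch target and
 * delay-slot flags in the DisasContext for the slot that follows.
 */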
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

    tcg_temp_free_i64(t0);

/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))
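/*
 * FOP() packs a COP1 format field (shifted to bits 25..21) together with
 * the 6-bit function field, so the fopcode enumerators below can be
 * matched against the corresponding bits of a decoded instruction.
 */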
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
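/*
 * gen_cp1() handles the CP1 register move instructions (MFC1/MTC1,
 * CFC1/CTC1 and the 64-bit / high-half variants): data is staged through
 * a TCG temporary and moved between the GPR and FPR banks, with CTC1
 * forcing translation to stop because it may change hflags.
 */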
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
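/*
 * MOVF/MOVT (gen_movci) and the per-format MOVCF helpers below implement
 * the conditional moves on an FP condition code: the selected FCR31 bit
 * is tested with a conditional branch over the move.
 */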
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
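/*
 * R6 SEL.fmt / SELEQZ.fmt / SELNEZ.fmt: the selection is done with
 * tcg_gen_movcond on bit 0 of the condition operand, so no branch is
 * generated.
 */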
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
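/*
 * gen_farith() is the main dispatcher for the FPU arithmetic opcodes;
 * the operation is selected by the fopcode value and, for the legacy
 * compare group, by the low function field (ctx->opcode & 0x3f).
 */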
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_sub_s(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_mul_s(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_div_s(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_helper_float_sqrt_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                tcg_gen_andi_i32(fp0, fp0, 0x7fffffffUL);
                gen_helper_float_abs_s(fp0, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                tcg_gen_xori_i32(fp0, fp0, 1UL << 31);
                gen_helper_float_chs_s(fp0, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
                gen_helper_float_round_2008_l_s(fp64, cpu_env, fp32);
                gen_helper_float_round_l_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
        check_cp1_64bitmode(ctx);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
                gen_helper_float_trunc_2008_l_s(fp64, cpu_env, fp32);
                gen_helper_float_trunc_l_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
        check_cp1_64bitmode(ctx);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
                gen_helper_float_ceil_2008_l_s(fp64, cpu_env, fp32);
                gen_helper_float_ceil_l_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
        check_cp1_64bitmode(ctx);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
                gen_helper_float_floor_2008_l_s(fp64, cpu_env, fp32);
                gen_helper_float_floor_l_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_round_2008_w_s(fp0, cpu_env, fp0);
                gen_helper_float_round_w_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_trunc_2008_w_s(fp0, cpu_env, fp0);
                gen_helper_float_trunc_w_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_ceil_2008_w_s(fp0, cpu_env, fp0);
                gen_helper_float_ceil_w_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_floor_2008_w_s(fp0, cpu_env, fp0);
                gen_helper_float_floor_w_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_s(ctx, op1, fd, ft, fs);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_s(ctx, op1, fd, ft, fs);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_s(ctx, op1, fd, ft, fs);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movcf_s(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
            TCGLabel *l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
            fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
            TCGLabel *l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
            fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_helper_float_recip_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_helper_float_rsqrt_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fd);
            gen_helper_float_maddf_s(fp2, cpu_env, fp0, fp1, fp2);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fd);
            gen_helper_float_msubf_s(fp2, cpu_env, fp0, fp1, fp2);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_helper_float_rint_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_helper_float_class_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
    case OPC_MIN_S: /* OPC_RECIP2_S */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_min_s(fp2, cpu_env, fp0, fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i32 fp0 = tcg_temp_new_i32();
                TCGv_i32 fp1 = tcg_temp_new_i32();
                gen_load_fpr32(ctx, fp0, fs);
                gen_load_fpr32(ctx, fp1, ft);
                gen_helper_float_recip2_s(fp0, cpu_env, fp0, fp1);
                tcg_temp_free_i32(fp1);
                gen_store_fpr32(ctx, fp0, fd);
                tcg_temp_free_i32(fp0);
    case OPC_MINA_S: /* OPC_RECIP1_S */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_mina_s(fp2, cpu_env, fp0, fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i32 fp0 = tcg_temp_new_i32();
                gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_recip1_s(fp0, cpu_env, fp0);
                gen_store_fpr32(ctx, fp0, fd);
                tcg_temp_free_i32(fp0);
    case OPC_MAX_S: /* OPC_RSQRT1_S */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_max_s(fp1, cpu_env, fp0, fp1);
            gen_store_fpr32(ctx, fp1, fd);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i32 fp0 = tcg_temp_new_i32();
                gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_rsqrt1_s(fp0, cpu_env, fp0);
                gen_store_fpr32(ctx, fp0, fd);
                tcg_temp_free_i32(fp0);
    case OPC_MAXA_S: /* OPC_RSQRT2_S */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_maxa_s(fp1, cpu_env, fp0, fp1);
            gen_store_fpr32(ctx, fp1, fd);
            tcg_temp_free_i32(fp1);
            tcg_temp_free_i32(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i32 fp0 = tcg_temp_new_i32();
                TCGv_i32 fp1 = tcg_temp_new_i32();
                gen_load_fpr32(ctx, fp0, fs);
                gen_load_fpr32(ctx, fp1, ft);
                gen_helper_float_rsqrt2_s(fp0, cpu_env, fp0, fp1);
                tcg_temp_free_i32(fp1);
                gen_store_fpr32(ctx, fp0, fd);
                tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
            gen_helper_float_cvtd_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
                gen_helper_float_cvt_2008_w_s(fp0, cpu_env, fp0);
                gen_helper_float_cvt_w_s(fp0, cpu_env, fp0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr32(ctx, fp32, fs);
                gen_helper_float_cvt_2008_l_s(fp64, cpu_env, fp32);
                gen_helper_float_cvt_l_s(fp64, cpu_env, fp32);
            tcg_temp_free_i32(fp32);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
            TCGv_i64 fp64 = tcg_temp_new_i64();
            TCGv_i32 fp32_0 = tcg_temp_new_i32();
            TCGv_i32 fp32_1 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp32_0, fs);
            gen_load_fpr32(ctx, fp32_1, ft);
            tcg_gen_concat_i32_i64(fp64, fp32_1, fp32_0);
            tcg_temp_free_i32(fp32_1);
            tcg_temp_free_i32(fp32_0);
            gen_store_fpr64(ctx, fp64, fd);
            tcg_temp_free_i64(fp64);
    case OPC_CMP_NGLE_S:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_s(ctx, func-48, ft, fs, cc);
            gen_cmp_s(ctx, func-48, ft, fs, cc);
        check_cp1_registers(ctx, fs | ft | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_add_d(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | ft | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_sub_d(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | ft | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_mul_d(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | ft | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_div_d(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_helper_float_sqrt_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                tcg_gen_andi_i64(fp0, fp0, 0x7fffffffffffffffULL);
                gen_helper_float_abs_d(fp0, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                tcg_gen_xori_i64(fp0, fp0, 1ULL << 63);
                gen_helper_float_chs_d(fp0, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_round_2008_l_d(fp0, cpu_env, fp0);
                gen_helper_float_round_l_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_trunc_2008_l_d(fp0, cpu_env, fp0);
                gen_helper_float_trunc_l_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_ceil_2008_l_d(fp0, cpu_env, fp0);
                gen_helper_float_ceil_l_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_floor_2008_l_d(fp0, cpu_env, fp0);
                gen_helper_float_floor_l_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
                gen_helper_float_round_2008_w_d(fp32, cpu_env, fp64);
                gen_helper_float_round_w_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
                gen_helper_float_trunc_2008_w_d(fp32, cpu_env, fp64);
                gen_helper_float_trunc_w_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
                gen_helper_float_ceil_2008_w_d(fp32, cpu_env, fp64);
                gen_helper_float_ceil_w_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
                gen_helper_float_floor_2008_w_d(fp32, cpu_env, fp64);
                gen_helper_float_floor_w_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_d(ctx, op1, fd, ft, fs);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_d(ctx, op1, fd, ft, fs);
        check_insn(ctx, ISA_MIPS32R6);
        gen_sel_d(ctx, op1, fd, ft, fs);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
            TCGLabel *l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
            fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
            TCGLabel *l1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
            fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_helper_float_recip_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_registers(ctx, fs | fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_helper_float_rsqrt_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fd);
            gen_helper_float_maddf_d(fp2, cpu_env, fp0, fp1, fp2);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fd);
            gen_helper_float_msubf_d(fp2, cpu_env, fp0, fp1, fp2);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_helper_float_rint_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_insn(ctx, ISA_MIPS32R6);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_helper_float_class_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
    case OPC_MIN_D: /* OPC_RECIP2_D */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_min_d(fp1, cpu_env, fp0, fp1);
            gen_store_fpr64(ctx, fp1, fd);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i64 fp0 = tcg_temp_new_i64();
                TCGv_i64 fp1 = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp0, fs);
                gen_load_fpr64(ctx, fp1, ft);
                gen_helper_float_recip2_d(fp0, cpu_env, fp0, fp1);
                tcg_temp_free_i64(fp1);
                gen_store_fpr64(ctx, fp0, fd);
                tcg_temp_free_i64(fp0);
    case OPC_MINA_D: /* OPC_RECIP1_D */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_mina_d(fp1, cpu_env, fp0, fp1);
            gen_store_fpr64(ctx, fp1, fd);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i64 fp0 = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_recip1_d(fp0, cpu_env, fp0);
                gen_store_fpr64(ctx, fp0, fd);
                tcg_temp_free_i64(fp0);
    case OPC_MAX_D: /* OPC_RSQRT1_D */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_max_d(fp1, cpu_env, fp0, fp1);
            gen_store_fpr64(ctx, fp1, fd);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i64 fp0 = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp0, fs);
                gen_helper_float_rsqrt1_d(fp0, cpu_env, fp0);
                gen_store_fpr64(ctx, fp0, fd);
                tcg_temp_free_i64(fp0);
    case OPC_MAXA_D: /* OPC_RSQRT2_D */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_helper_float_maxa_d(fp1, cpu_env, fp0, fp1);
            gen_store_fpr64(ctx, fp1, fd);
            tcg_temp_free_i64(fp1);
            tcg_temp_free_i64(fp0);
            check_cp1_64bitmode(ctx);
                TCGv_i64 fp0 = tcg_temp_new_i64();
                TCGv_i64 fp1 = tcg_temp_new_i64();
                gen_load_fpr64(ctx, fp0, fs);
                gen_load_fpr64(ctx, fp1, ft);
                gen_helper_float_rsqrt2_d(fp0, cpu_env, fp0, fp1);
                tcg_temp_free_i64(fp1);
                gen_store_fpr64(ctx, fp0, fd);
                tcg_temp_free_i64(fp0);
    case OPC_CMP_NGLE_D:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_d(ctx, func-48, ft, fs, cc);
            gen_cmp_d(ctx, func-48, ft, fs, cc);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
            gen_helper_float_cvts_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_cp1_registers(ctx, fs);
            TCGv_i32 fp32 = tcg_temp_new_i32();
            TCGv_i64 fp64 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp64, fs);
            if (ctx->nan2008) {
                gen_helper_float_cvt_2008_w_d(fp32, cpu_env, fp64);
                gen_helper_float_cvt_w_d(fp32, cpu_env, fp64);
            tcg_temp_free_i64(fp64);
            gen_store_fpr32(ctx, fp32, fd);
            tcg_temp_free_i32(fp32);
        check_cp1_64bitmode(ctx);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            if (ctx->nan2008) {
                gen_helper_float_cvt_2008_l_d(fp0, cpu_env, fp0);
                gen_helper_float_cvt_l_d(fp0, cpu_env, fp0);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
10039 TCGv_i32 fp0
= tcg_temp_new_i32();
10041 gen_load_fpr32(ctx
, fp0
, fs
);
10042 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10043 gen_store_fpr32(ctx
, fp0
, fd
);
10044 tcg_temp_free_i32(fp0
);
10048 check_cp1_registers(ctx
, fd
);
10050 TCGv_i32 fp32
= tcg_temp_new_i32();
10051 TCGv_i64 fp64
= tcg_temp_new_i64();
10053 gen_load_fpr32(ctx
, fp32
, fs
);
10054 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10055 tcg_temp_free_i32(fp32
);
10056 gen_store_fpr64(ctx
, fp64
, fd
);
10057 tcg_temp_free_i64(fp64
);
10061 check_cp1_64bitmode(ctx
);
10063 TCGv_i32 fp32
= tcg_temp_new_i32();
10064 TCGv_i64 fp64
= tcg_temp_new_i64();
10066 gen_load_fpr64(ctx
, fp64
, fs
);
10067 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10068 tcg_temp_free_i64(fp64
);
10069 gen_store_fpr32(ctx
, fp32
, fd
);
10070 tcg_temp_free_i32(fp32
);
10074 check_cp1_64bitmode(ctx
);
10076 TCGv_i64 fp0
= tcg_temp_new_i64();
10078 gen_load_fpr64(ctx
, fp0
, fs
);
10079 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10080 gen_store_fpr64(ctx
, fp0
, fd
);
10081 tcg_temp_free_i64(fp0
);
10084 case OPC_CVT_PS_PW
:
10087 TCGv_i64 fp0
= tcg_temp_new_i64();
10089 gen_load_fpr64(ctx
, fp0
, fs
);
10090 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10091 gen_store_fpr64(ctx
, fp0
, fd
);
10092 tcg_temp_free_i64(fp0
);
10098 TCGv_i64 fp0
= tcg_temp_new_i64();
10099 TCGv_i64 fp1
= tcg_temp_new_i64();
10101 gen_load_fpr64(ctx
, fp0
, fs
);
10102 gen_load_fpr64(ctx
, fp1
, ft
);
10103 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10104 tcg_temp_free_i64(fp1
);
10105 gen_store_fpr64(ctx
, fp0
, fd
);
10106 tcg_temp_free_i64(fp0
);
10112 TCGv_i64 fp0
= tcg_temp_new_i64();
10113 TCGv_i64 fp1
= tcg_temp_new_i64();
10115 gen_load_fpr64(ctx
, fp0
, fs
);
10116 gen_load_fpr64(ctx
, fp1
, ft
);
10117 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10118 tcg_temp_free_i64(fp1
);
10119 gen_store_fpr64(ctx
, fp0
, fd
);
10120 tcg_temp_free_i64(fp0
);
10126 TCGv_i64 fp0
= tcg_temp_new_i64();
10127 TCGv_i64 fp1
= tcg_temp_new_i64();
10129 gen_load_fpr64(ctx
, fp0
, fs
);
10130 gen_load_fpr64(ctx
, fp1
, ft
);
10131 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10132 tcg_temp_free_i64(fp1
);
10133 gen_store_fpr64(ctx
, fp0
, fd
);
10134 tcg_temp_free_i64(fp0
);
10140 TCGv_i64 fp0
= tcg_temp_new_i64();
10142 gen_load_fpr64(ctx
, fp0
, fs
);
10143 gen_helper_float_abs_ps(fp0
, fp0
);
10144 gen_store_fpr64(ctx
, fp0
, fd
);
10145 tcg_temp_free_i64(fp0
);
10151 TCGv_i64 fp0
= tcg_temp_new_i64();
10153 gen_load_fpr64(ctx
, fp0
, fs
);
10154 gen_store_fpr64(ctx
, fp0
, fd
);
10155 tcg_temp_free_i64(fp0
);
10161 TCGv_i64 fp0
= tcg_temp_new_i64();
10163 gen_load_fpr64(ctx
, fp0
, fs
);
10164 gen_helper_float_chs_ps(fp0
, fp0
);
10165 gen_store_fpr64(ctx
, fp0
, fd
);
10166 tcg_temp_free_i64(fp0
);
10171 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10176 TCGLabel
*l1
= gen_new_label();
10180 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10181 fp0
= tcg_temp_new_i64();
10182 gen_load_fpr64(ctx
, fp0
, fs
);
10183 gen_store_fpr64(ctx
, fp0
, fd
);
10184 tcg_temp_free_i64(fp0
);
10191 TCGLabel
*l1
= gen_new_label();
10195 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10196 fp0
= tcg_temp_new_i64();
10197 gen_load_fpr64(ctx
, fp0
, fs
);
10198 gen_store_fpr64(ctx
, fp0
, fd
);
10199 tcg_temp_free_i64(fp0
);
10207 TCGv_i64 fp0
= tcg_temp_new_i64();
10208 TCGv_i64 fp1
= tcg_temp_new_i64();
10210 gen_load_fpr64(ctx
, fp0
, ft
);
10211 gen_load_fpr64(ctx
, fp1
, fs
);
10212 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10213 tcg_temp_free_i64(fp1
);
10214 gen_store_fpr64(ctx
, fp0
, fd
);
10215 tcg_temp_free_i64(fp0
);
10221 TCGv_i64 fp0
= tcg_temp_new_i64();
10222 TCGv_i64 fp1
= tcg_temp_new_i64();
10224 gen_load_fpr64(ctx
, fp0
, ft
);
10225 gen_load_fpr64(ctx
, fp1
, fs
);
10226 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10227 tcg_temp_free_i64(fp1
);
10228 gen_store_fpr64(ctx
, fp0
, fd
);
10229 tcg_temp_free_i64(fp0
);
10232 case OPC_RECIP2_PS
:
10235 TCGv_i64 fp0
= tcg_temp_new_i64();
10236 TCGv_i64 fp1
= tcg_temp_new_i64();
10238 gen_load_fpr64(ctx
, fp0
, fs
);
10239 gen_load_fpr64(ctx
, fp1
, ft
);
10240 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10241 tcg_temp_free_i64(fp1
);
10242 gen_store_fpr64(ctx
, fp0
, fd
);
10243 tcg_temp_free_i64(fp0
);
10246 case OPC_RECIP1_PS
:
10249 TCGv_i64 fp0
= tcg_temp_new_i64();
10251 gen_load_fpr64(ctx
, fp0
, fs
);
10252 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10253 gen_store_fpr64(ctx
, fp0
, fd
);
10254 tcg_temp_free_i64(fp0
);
10257 case OPC_RSQRT1_PS
:
10260 TCGv_i64 fp0
= tcg_temp_new_i64();
10262 gen_load_fpr64(ctx
, fp0
, fs
);
10263 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10264 gen_store_fpr64(ctx
, fp0
, fd
);
10265 tcg_temp_free_i64(fp0
);
10268 case OPC_RSQRT2_PS
:
10271 TCGv_i64 fp0
= tcg_temp_new_i64();
10272 TCGv_i64 fp1
= tcg_temp_new_i64();
10274 gen_load_fpr64(ctx
, fp0
, fs
);
10275 gen_load_fpr64(ctx
, fp1
, ft
);
10276 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10277 tcg_temp_free_i64(fp1
);
10278 gen_store_fpr64(ctx
, fp0
, fd
);
10279 tcg_temp_free_i64(fp0
);
10283 check_cp1_64bitmode(ctx
);
10285 TCGv_i32 fp0
= tcg_temp_new_i32();
10287 gen_load_fpr32h(ctx
, fp0
, fs
);
10288 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10289 gen_store_fpr32(ctx
, fp0
, fd
);
10290 tcg_temp_free_i32(fp0
);
10293 case OPC_CVT_PW_PS
:
10296 TCGv_i64 fp0
= tcg_temp_new_i64();
10298 gen_load_fpr64(ctx
, fp0
, fs
);
10299 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10300 gen_store_fpr64(ctx
, fp0
, fd
);
10301 tcg_temp_free_i64(fp0
);
10305 check_cp1_64bitmode(ctx
);
10307 TCGv_i32 fp0
= tcg_temp_new_i32();
10309 gen_load_fpr32(ctx
, fp0
, fs
);
10310 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10311 gen_store_fpr32(ctx
, fp0
, fd
);
10312 tcg_temp_free_i32(fp0
);
10318 TCGv_i32 fp0
= tcg_temp_new_i32();
10319 TCGv_i32 fp1
= tcg_temp_new_i32();
10321 gen_load_fpr32(ctx
, fp0
, fs
);
10322 gen_load_fpr32(ctx
, fp1
, ft
);
10323 gen_store_fpr32h(ctx
, fp0
, fd
);
10324 gen_store_fpr32(ctx
, fp1
, fd
);
10325 tcg_temp_free_i32(fp0
);
10326 tcg_temp_free_i32(fp1
);
10332 TCGv_i32 fp0
= tcg_temp_new_i32();
10333 TCGv_i32 fp1
= tcg_temp_new_i32();
10335 gen_load_fpr32(ctx
, fp0
, fs
);
10336 gen_load_fpr32h(ctx
, fp1
, ft
);
10337 gen_store_fpr32(ctx
, fp1
, fd
);
10338 gen_store_fpr32h(ctx
, fp0
, fd
);
10339 tcg_temp_free_i32(fp0
);
10340 tcg_temp_free_i32(fp1
);
10346 TCGv_i32 fp0
= tcg_temp_new_i32();
10347 TCGv_i32 fp1
= tcg_temp_new_i32();
10349 gen_load_fpr32h(ctx
, fp0
, fs
);
10350 gen_load_fpr32(ctx
, fp1
, ft
);
10351 gen_store_fpr32(ctx
, fp1
, fd
);
10352 gen_store_fpr32h(ctx
, fp0
, fd
);
10353 tcg_temp_free_i32(fp0
);
10354 tcg_temp_free_i32(fp1
);
10360 TCGv_i32 fp0
= tcg_temp_new_i32();
10361 TCGv_i32 fp1
= tcg_temp_new_i32();
10363 gen_load_fpr32h(ctx
, fp0
, fs
);
10364 gen_load_fpr32h(ctx
, fp1
, ft
);
10365 gen_store_fpr32(ctx
, fp1
, fd
);
10366 gen_store_fpr32h(ctx
, fp0
, fd
);
10367 tcg_temp_free_i32(fp0
);
10368 tcg_temp_free_i32(fp1
);
10372 case OPC_CMP_UN_PS
:
10373 case OPC_CMP_EQ_PS
:
10374 case OPC_CMP_UEQ_PS
:
10375 case OPC_CMP_OLT_PS
:
10376 case OPC_CMP_ULT_PS
:
10377 case OPC_CMP_OLE_PS
:
10378 case OPC_CMP_ULE_PS
:
10379 case OPC_CMP_SF_PS
:
10380 case OPC_CMP_NGLE_PS
:
10381 case OPC_CMP_SEQ_PS
:
10382 case OPC_CMP_NGL_PS
:
10383 case OPC_CMP_LT_PS
:
10384 case OPC_CMP_NGE_PS
:
10385 case OPC_CMP_LE_PS
:
10386 case OPC_CMP_NGT_PS
:
10387 if (ctx
->opcode
& (1 << 6)) {
10388 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10390 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10394 MIPS_INVAL("farith");
10395 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
{
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);

        check_cp1_registers(ctx, fd);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
        tcg_temp_free_i32(fp0);

        check_cp1_registers(ctx, fs);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);
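    /*
     * The tcg_gen_andi_tl(t0, t0, ~0x7) above clears the low three address
     * bits before the 64-bit memory operation, i.e. the access is forced
     * onto an 8-byte boundary rather than faulting.  This matches the
     * "unaligned" indexed FP load/store forms (LUXC1/SUXC1), which ignore
     * the low address bits by definition.
     */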
10481 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10482 int fd
, int fr
, int fs
, int ft
)
10488 TCGv t0
= tcg_temp_local_new();
10489 TCGv_i32 fp
= tcg_temp_new_i32();
10490 TCGv_i32 fph
= tcg_temp_new_i32();
10491 TCGLabel
*l1
= gen_new_label();
10492 TCGLabel
*l2
= gen_new_label();
10494 gen_load_gpr(t0
, fr
);
10495 tcg_gen_andi_tl(t0
, t0
, 0x7);
10497 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10498 gen_load_fpr32(ctx
, fp
, fs
);
10499 gen_load_fpr32h(ctx
, fph
, fs
);
10500 gen_store_fpr32(ctx
, fp
, fd
);
10501 gen_store_fpr32h(ctx
, fph
, fd
);
10504 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10506 #ifdef TARGET_WORDS_BIGENDIAN
10507 gen_load_fpr32(ctx
, fp
, fs
);
10508 gen_load_fpr32h(ctx
, fph
, ft
);
10509 gen_store_fpr32h(ctx
, fp
, fd
);
10510 gen_store_fpr32(ctx
, fph
, fd
);
10512 gen_load_fpr32h(ctx
, fph
, fs
);
10513 gen_load_fpr32(ctx
, fp
, ft
);
10514 gen_store_fpr32(ctx
, fph
, fd
);
10515 gen_store_fpr32h(ctx
, fp
, fd
);
10518 tcg_temp_free_i32(fp
);
10519 tcg_temp_free_i32(fph
);
10525 TCGv_i32 fp0
= tcg_temp_new_i32();
10526 TCGv_i32 fp1
= tcg_temp_new_i32();
10527 TCGv_i32 fp2
= tcg_temp_new_i32();
10529 gen_load_fpr32(ctx
, fp0
, fs
);
10530 gen_load_fpr32(ctx
, fp1
, ft
);
10531 gen_load_fpr32(ctx
, fp2
, fr
);
10532 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10533 tcg_temp_free_i32(fp0
);
10534 tcg_temp_free_i32(fp1
);
10535 gen_store_fpr32(ctx
, fp2
, fd
);
10536 tcg_temp_free_i32(fp2
);
10541 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10543 TCGv_i64 fp0
= tcg_temp_new_i64();
10544 TCGv_i64 fp1
= tcg_temp_new_i64();
10545 TCGv_i64 fp2
= tcg_temp_new_i64();
10547 gen_load_fpr64(ctx
, fp0
, fs
);
10548 gen_load_fpr64(ctx
, fp1
, ft
);
10549 gen_load_fpr64(ctx
, fp2
, fr
);
10550 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10551 tcg_temp_free_i64(fp0
);
10552 tcg_temp_free_i64(fp1
);
10553 gen_store_fpr64(ctx
, fp2
, fd
);
10554 tcg_temp_free_i64(fp2
);
10560 TCGv_i64 fp0
= tcg_temp_new_i64();
10561 TCGv_i64 fp1
= tcg_temp_new_i64();
10562 TCGv_i64 fp2
= tcg_temp_new_i64();
10564 gen_load_fpr64(ctx
, fp0
, fs
);
10565 gen_load_fpr64(ctx
, fp1
, ft
);
10566 gen_load_fpr64(ctx
, fp2
, fr
);
10567 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10568 tcg_temp_free_i64(fp0
);
10569 tcg_temp_free_i64(fp1
);
10570 gen_store_fpr64(ctx
, fp2
, fd
);
10571 tcg_temp_free_i64(fp2
);
10577 TCGv_i32 fp0
= tcg_temp_new_i32();
10578 TCGv_i32 fp1
= tcg_temp_new_i32();
10579 TCGv_i32 fp2
= tcg_temp_new_i32();
10581 gen_load_fpr32(ctx
, fp0
, fs
);
10582 gen_load_fpr32(ctx
, fp1
, ft
);
10583 gen_load_fpr32(ctx
, fp2
, fr
);
10584 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10585 tcg_temp_free_i32(fp0
);
10586 tcg_temp_free_i32(fp1
);
10587 gen_store_fpr32(ctx
, fp2
, fd
);
10588 tcg_temp_free_i32(fp2
);
10593 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10595 TCGv_i64 fp0
= tcg_temp_new_i64();
10596 TCGv_i64 fp1
= tcg_temp_new_i64();
10597 TCGv_i64 fp2
= tcg_temp_new_i64();
10599 gen_load_fpr64(ctx
, fp0
, fs
);
10600 gen_load_fpr64(ctx
, fp1
, ft
);
10601 gen_load_fpr64(ctx
, fp2
, fr
);
10602 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10603 tcg_temp_free_i64(fp0
);
10604 tcg_temp_free_i64(fp1
);
10605 gen_store_fpr64(ctx
, fp2
, fd
);
10606 tcg_temp_free_i64(fp2
);
10612 TCGv_i64 fp0
= tcg_temp_new_i64();
10613 TCGv_i64 fp1
= tcg_temp_new_i64();
10614 TCGv_i64 fp2
= tcg_temp_new_i64();
10616 gen_load_fpr64(ctx
, fp0
, fs
);
10617 gen_load_fpr64(ctx
, fp1
, ft
);
10618 gen_load_fpr64(ctx
, fp2
, fr
);
10619 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10620 tcg_temp_free_i64(fp0
);
10621 tcg_temp_free_i64(fp1
);
10622 gen_store_fpr64(ctx
, fp2
, fd
);
10623 tcg_temp_free_i64(fp2
);
10629 TCGv_i32 fp0
= tcg_temp_new_i32();
10630 TCGv_i32 fp1
= tcg_temp_new_i32();
10631 TCGv_i32 fp2
= tcg_temp_new_i32();
10633 gen_load_fpr32(ctx
, fp0
, fs
);
10634 gen_load_fpr32(ctx
, fp1
, ft
);
10635 gen_load_fpr32(ctx
, fp2
, fr
);
10636 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10637 tcg_temp_free_i32(fp0
);
10638 tcg_temp_free_i32(fp1
);
10639 gen_store_fpr32(ctx
, fp2
, fd
);
10640 tcg_temp_free_i32(fp2
);
10645 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10647 TCGv_i64 fp0
= tcg_temp_new_i64();
10648 TCGv_i64 fp1
= tcg_temp_new_i64();
10649 TCGv_i64 fp2
= tcg_temp_new_i64();
10651 gen_load_fpr64(ctx
, fp0
, fs
);
10652 gen_load_fpr64(ctx
, fp1
, ft
);
10653 gen_load_fpr64(ctx
, fp2
, fr
);
10654 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10655 tcg_temp_free_i64(fp0
);
10656 tcg_temp_free_i64(fp1
);
10657 gen_store_fpr64(ctx
, fp2
, fd
);
10658 tcg_temp_free_i64(fp2
);
10664 TCGv_i64 fp0
= tcg_temp_new_i64();
10665 TCGv_i64 fp1
= tcg_temp_new_i64();
10666 TCGv_i64 fp2
= tcg_temp_new_i64();
10668 gen_load_fpr64(ctx
, fp0
, fs
);
10669 gen_load_fpr64(ctx
, fp1
, ft
);
10670 gen_load_fpr64(ctx
, fp2
, fr
);
10671 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10672 tcg_temp_free_i64(fp0
);
10673 tcg_temp_free_i64(fp1
);
10674 gen_store_fpr64(ctx
, fp2
, fd
);
10675 tcg_temp_free_i64(fp2
);
10681 TCGv_i32 fp0
= tcg_temp_new_i32();
10682 TCGv_i32 fp1
= tcg_temp_new_i32();
10683 TCGv_i32 fp2
= tcg_temp_new_i32();
10685 gen_load_fpr32(ctx
, fp0
, fs
);
10686 gen_load_fpr32(ctx
, fp1
, ft
);
10687 gen_load_fpr32(ctx
, fp2
, fr
);
10688 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10689 tcg_temp_free_i32(fp0
);
10690 tcg_temp_free_i32(fp1
);
10691 gen_store_fpr32(ctx
, fp2
, fd
);
10692 tcg_temp_free_i32(fp2
);
10697 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10699 TCGv_i64 fp0
= tcg_temp_new_i64();
10700 TCGv_i64 fp1
= tcg_temp_new_i64();
10701 TCGv_i64 fp2
= tcg_temp_new_i64();
10703 gen_load_fpr64(ctx
, fp0
, fs
);
10704 gen_load_fpr64(ctx
, fp1
, ft
);
10705 gen_load_fpr64(ctx
, fp2
, fr
);
10706 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10707 tcg_temp_free_i64(fp0
);
10708 tcg_temp_free_i64(fp1
);
10709 gen_store_fpr64(ctx
, fp2
, fd
);
10710 tcg_temp_free_i64(fp2
);
10716 TCGv_i64 fp0
= tcg_temp_new_i64();
10717 TCGv_i64 fp1
= tcg_temp_new_i64();
10718 TCGv_i64 fp2
= tcg_temp_new_i64();
10720 gen_load_fpr64(ctx
, fp0
, fs
);
10721 gen_load_fpr64(ctx
, fp1
, ft
);
10722 gen_load_fpr64(ctx
, fp2
, fr
);
10723 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10724 tcg_temp_free_i64(fp0
);
10725 tcg_temp_free_i64(fp1
);
10726 gen_store_fpr64(ctx
, fp2
, fd
);
10727 tcg_temp_free_i64(fp2
);
10731 MIPS_INVAL("flt3_arith");
10732 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);

        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);

        if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_helper_rdhwr_cc(t0, cpu_env);
        if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
        gen_store_gpr(t0, rt);
        /* Break the TB to be able to take timer interrupts immediately
           after reading count.  BS_STOP isn't sufficient, we need to ensure
           we break completely out of translated code.  */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;

        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
        if (rd) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);

#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
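/*
 * RDHWR hardware register numbers handled above, per the MIPS32
 * specification: 0 = CPUNum, 1 = SYNCI_Step, 2 = CC (cycle counter),
 * 3 = CCRes, 4 = PerfCnt and 5 = XNP (both Release 6 additions), and
 * 29 = ULR (UserLocal).  In user mode ULR is read straight from
 * active_tc.CP0_UserLocal; in system mode it is only readable when CP0 is
 * accessible or the OS has set HWREna.ULR, otherwise RI is raised.
 */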
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags as hflags may be changed
           at execution time by the instruction in the delay / forbidden
           slot.  */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch-likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_lookup_and_goto_ptr();
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
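/*
 * gen_branch() finishes whatever branch state the just-translated
 * instruction left in ctx->hflags: FBNSLOT falls through to the next
 * instruction, B/BL go straight to btarget, BC emits a conditional
 * goto_tb keyed on `bcond`, and BR jumps through a register, folding the
 * low bit of btarget into the MIPS16/microMIPS ISA-mode hflag before
 * updating cpu_PC and chaining via tcg_gen_lookup_and_goto_ptr().
 */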
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands and calculate btarget */
    switch (opc) {
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BC:
    case OPC_BALC:
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BEQZC:
    case OPC_BNEZC:
        if (rs != 0) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
            ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        } else {
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        }
        break;
    default:
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            }
            break;
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            }
            break;
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            }
            break;
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            }
            break;
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            if (rs >= rt) {
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                } else {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                }
                tcg_temp_free(input_overflow);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                } else {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                }
            } else {
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                } else {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                }
            }
            break;
        case OPC_BEQZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            break;
        case OPC_BNEZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            break;
        default:
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        gen_set_label(fs);

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
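/*
 * The BOVC/BNVC sequence above detects 32-bit signed overflow of rs + rt
 * without a helper call: input_overflow is set if either operand is not a
 * properly sign-extended 32-bit value, and the xor/andc combination
 * computes (sum ^ rt) & ~(rs ^ rt), whose sign bit is set exactly when
 * both addends have the same sign and the 32-bit sum has the opposite
 * sign, i.e. when the addition overflowed.
 */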
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,

static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
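/*
 * xlat() expands the 3-bit MIPS16 register fields: encodings 0 and 1 name
 * $16 and $17 (s0/s1) and encodings 2..7 name $2..$7 (v0/v1, a0..a3),
 * which is the register subset reachable from most 16-bit instruction
 * formats.
 */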
11251 static void gen_mips16_save (DisasContext
*ctx
,
11252 int xsregs
, int aregs
,
11253 int do_ra
, int do_s0
, int do_s1
,
11256 TCGv t0
= tcg_temp_new();
11257 TCGv t1
= tcg_temp_new();
11258 TCGv t2
= tcg_temp_new();
11288 generate_exception_end(ctx
, EXCP_RI
);
11294 gen_base_offset_addr(ctx
, t0
, 29, 12);
11295 gen_load_gpr(t1
, 7);
11296 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11299 gen_base_offset_addr(ctx
, t0
, 29, 8);
11300 gen_load_gpr(t1
, 6);
11301 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11304 gen_base_offset_addr(ctx
, t0
, 29, 4);
11305 gen_load_gpr(t1
, 5);
11306 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11309 gen_base_offset_addr(ctx
, t0
, 29, 0);
11310 gen_load_gpr(t1
, 4);
11311 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11314 gen_load_gpr(t0
, 29);
11316 #define DECR_AND_STORE(reg) do { \
11317 tcg_gen_movi_tl(t2, -4); \
11318 gen_op_addr_add(ctx, t0, t0, t2); \
11319 gen_load_gpr(t1, reg); \
11320 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11324 DECR_AND_STORE(31);
11329 DECR_AND_STORE(30);
11332 DECR_AND_STORE(23);
11335 DECR_AND_STORE(22);
11338 DECR_AND_STORE(21);
11341 DECR_AND_STORE(20);
11344 DECR_AND_STORE(19);
11347 DECR_AND_STORE(18);
11351 DECR_AND_STORE(17);
11354 DECR_AND_STORE(16);
11384 generate_exception_end(ctx
, EXCP_RI
);
11400 #undef DECR_AND_STORE
11402 tcg_gen_movi_tl(t2
, -framesize
);
11403 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11409 static void gen_mips16_restore (DisasContext
*ctx
,
11410 int xsregs
, int aregs
,
11411 int do_ra
, int do_s0
, int do_s1
,
11415 TCGv t0
= tcg_temp_new();
11416 TCGv t1
= tcg_temp_new();
11417 TCGv t2
= tcg_temp_new();
11419 tcg_gen_movi_tl(t2
, framesize
);
11420 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11422 #define DECR_AND_LOAD(reg) do { \
11423 tcg_gen_movi_tl(t2, -4); \
11424 gen_op_addr_add(ctx, t0, t0, t2); \
11425 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11426 gen_store_gpr(t1, reg); \
11490 generate_exception_end(ctx
, EXCP_RI
);
11506 #undef DECR_AND_LOAD
11508 tcg_gen_movi_tl(t2
, framesize
);
11509 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11515 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11516 int is_64_bit
, int extended
)
11520 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11521 generate_exception_end(ctx
, EXCP_RI
);
11525 t0
= tcg_temp_new();
11527 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11528 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11530 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11536 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11539 TCGv_i32 t0
= tcg_const_i32(op
);
11540 TCGv t1
= tcg_temp_new();
11541 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11542 gen_helper_cache(cpu_env
, t1
, t0
);
11545 #if defined(TARGET_MIPS64)
11546 static void decode_i64_mips16 (DisasContext
*ctx
,
11547 int ry
, int funct
, int16_t offset
,
11552 check_insn(ctx
, ISA_MIPS3
);
11553 check_mips_64(ctx
);
11554 offset
= extended
? offset
: offset
<< 3;
11555 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11558 check_insn(ctx
, ISA_MIPS3
);
11559 check_mips_64(ctx
);
11560 offset
= extended
? offset
: offset
<< 3;
11561 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11564 check_insn(ctx
, ISA_MIPS3
);
11565 check_mips_64(ctx
);
11566 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11567 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11570 check_insn(ctx
, ISA_MIPS3
);
11571 check_mips_64(ctx
);
11572 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11573 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11576 check_insn(ctx
, ISA_MIPS3
);
11577 check_mips_64(ctx
);
11578 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11579 generate_exception_end(ctx
, EXCP_RI
);
11581 offset
= extended
? offset
: offset
<< 3;
11582 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11586 check_insn(ctx
, ISA_MIPS3
);
11587 check_mips_64(ctx
);
11588 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11589 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11592 check_insn(ctx
, ISA_MIPS3
);
11593 check_mips_64(ctx
);
11594 offset
= extended
? offset
: offset
<< 2;
11595 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11598 check_insn(ctx
, ISA_MIPS3
);
11599 check_mips_64(ctx
);
11600 offset
= extended
? offset
: offset
<< 2;
11601 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11607 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11609 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11610 int op
, rx
, ry
, funct
, sa
;
11611 int16_t imm
, offset
;
11613 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11614 op
= (ctx
->opcode
>> 11) & 0x1f;
11615 sa
= (ctx
->opcode
>> 22) & 0x1f;
11616 funct
= (ctx
->opcode
>> 8) & 0x7;
11617 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11618 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11619 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11620 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11621 | (ctx
->opcode
& 0x1f));
11623 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11626 case M16_OPC_ADDIUSP
:
11627 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11629 case M16_OPC_ADDIUPC
:
11630 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11633 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11634 /* No delay slot, so just process as a normal instruction */
11637 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11638 /* No delay slot, so just process as a normal instruction */
11640 case M16_OPC_BNEQZ
:
11641 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11642 /* No delay slot, so just process as a normal instruction */
11644 case M16_OPC_SHIFT
:
11645 switch (ctx
->opcode
& 0x3) {
11647 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11650 #if defined(TARGET_MIPS64)
11651 check_mips_64(ctx
);
11652 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11654 generate_exception_end(ctx
, EXCP_RI
);
11658 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11661 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11665 #if defined(TARGET_MIPS64)
11667 check_insn(ctx
, ISA_MIPS3
);
11668 check_mips_64(ctx
);
11669 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11673 imm
= ctx
->opcode
& 0xf;
11674 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11675 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11676 imm
= (int16_t) (imm
<< 1) >> 1;
11677 if ((ctx
->opcode
>> 4) & 0x1) {
11678 #if defined(TARGET_MIPS64)
11679 check_mips_64(ctx
);
11680 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11682 generate_exception_end(ctx
, EXCP_RI
);
11685 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11688 case M16_OPC_ADDIU8
:
11689 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11692 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11694 case M16_OPC_SLTIU
:
11695 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11700 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11703 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11706 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11709 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11712 check_insn(ctx
, ISA_MIPS32
);
11714 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11715 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11716 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11717 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11718 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11719 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11720 | (ctx
->opcode
& 0xf)) << 3;
11722 if (ctx
->opcode
& (1 << 7)) {
11723 gen_mips16_save(ctx
, xsregs
, aregs
,
11724 do_ra
, do_s0
, do_s1
,
11727 gen_mips16_restore(ctx
, xsregs
, aregs
,
11728 do_ra
, do_s0
, do_s1
,
11734 generate_exception_end(ctx
, EXCP_RI
);
11739 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11742 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11744 #if defined(TARGET_MIPS64)
11746 check_insn(ctx
, ISA_MIPS3
);
11747 check_mips_64(ctx
);
11748 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11752 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11755 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11758 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11761 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11764 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11767 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11770 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11772 #if defined(TARGET_MIPS64)
11774 check_insn(ctx
, ISA_MIPS3
);
11775 check_mips_64(ctx
);
11776 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11780 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11783 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11786 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11789 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11791 #if defined(TARGET_MIPS64)
11793 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11797 generate_exception_end(ctx
, EXCP_RI
);
11804 static inline bool is_uhi(int sdbbp_code
)
11806 #ifdef CONFIG_USER_ONLY
11809 return semihosting_enabled() && sdbbp_code
== 1;
11813 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11817 int op
, cnvt_op
, op1
, offset
;
11821 op
= (ctx
->opcode
>> 11) & 0x1f;
11822 sa
= (ctx
->opcode
>> 2) & 0x7;
11823 sa
= sa
== 0 ? 8 : sa
;
11824 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11825 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11826 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11827 op1
= offset
= ctx
->opcode
& 0x1f;
11832 case M16_OPC_ADDIUSP
:
11834 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11836 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11839 case M16_OPC_ADDIUPC
:
11840 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11843 offset
= (ctx
->opcode
& 0x7ff) << 1;
11844 offset
= (int16_t)(offset
<< 4) >> 4;
11845 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11846 /* No delay slot, so just process as a normal instruction */
11849 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11850 offset
= (((ctx
->opcode
& 0x1f) << 21)
11851 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11853 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11854 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11858 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11859 ((int8_t)ctx
->opcode
) << 1, 0);
11860 /* No delay slot, so just process as a normal instruction */
11862 case M16_OPC_BNEQZ
:
11863 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11864 ((int8_t)ctx
->opcode
) << 1, 0);
11865 /* No delay slot, so just process as a normal instruction */
11867 case M16_OPC_SHIFT
:
11868 switch (ctx
->opcode
& 0x3) {
11870 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11873 #if defined(TARGET_MIPS64)
11874 check_insn(ctx
, ISA_MIPS3
);
11875 check_mips_64(ctx
);
11876 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11878 generate_exception_end(ctx
, EXCP_RI
);
11882 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11885 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11889 #if defined(TARGET_MIPS64)
11891 check_insn(ctx
, ISA_MIPS3
);
11892 check_mips_64(ctx
);
11893 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11898 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11900 if ((ctx
->opcode
>> 4) & 1) {
11901 #if defined(TARGET_MIPS64)
11902 check_insn(ctx
, ISA_MIPS3
);
11903 check_mips_64(ctx
);
11904 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11906 generate_exception_end(ctx
, EXCP_RI
);
11909 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11913 case M16_OPC_ADDIU8
:
11915 int16_t imm
= (int8_t) ctx
->opcode
;
11917 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11922 int16_t imm
= (uint8_t) ctx
->opcode
;
11923 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11926 case M16_OPC_SLTIU
:
11928 int16_t imm
= (uint8_t) ctx
->opcode
;
11929 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11936 funct
= (ctx
->opcode
>> 8) & 0x7;
11939 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11940 ((int8_t)ctx
->opcode
) << 1, 0);
11943 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11944 ((int8_t)ctx
->opcode
) << 1, 0);
11947 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11950 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11951 ((int8_t)ctx
->opcode
) << 3);
11954 check_insn(ctx
, ISA_MIPS32
);
11956 int do_ra
= ctx
->opcode
& (1 << 6);
11957 int do_s0
= ctx
->opcode
& (1 << 5);
11958 int do_s1
= ctx
->opcode
& (1 << 4);
11959 int framesize
= ctx
->opcode
& 0xf;
11961 if (framesize
== 0) {
11964 framesize
= framesize
<< 3;
11967 if (ctx
->opcode
& (1 << 7)) {
11968 gen_mips16_save(ctx
, 0, 0,
11969 do_ra
, do_s0
, do_s1
, framesize
);
11971 gen_mips16_restore(ctx
, 0, 0,
11972 do_ra
, do_s0
, do_s1
, framesize
);
11978 int rz
= xlat(ctx
->opcode
& 0x7);
11980 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11981 ((ctx
->opcode
>> 5) & 0x7);
11982 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11986 reg32
= ctx
->opcode
& 0x1f;
11987 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11990 generate_exception_end(ctx
, EXCP_RI
);
11997 int16_t imm
= (uint8_t) ctx
->opcode
;
11999 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12004 int16_t imm
= (uint8_t) ctx
->opcode
;
12005 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12008 #if defined(TARGET_MIPS64)
12010 check_insn(ctx
, ISA_MIPS3
);
12011 check_mips_64(ctx
);
12012 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12016 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12019 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12022 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12025 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12028 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12031 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12034 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12036 #if defined (TARGET_MIPS64)
12038 check_insn(ctx
, ISA_MIPS3
);
12039 check_mips_64(ctx
);
12040 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12044 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12047 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12050 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12053 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12057 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12060 switch (ctx
->opcode
& 0x3) {
12062 mips32_op
= OPC_ADDU
;
12065 mips32_op
= OPC_SUBU
;
12067 #if defined(TARGET_MIPS64)
12069 mips32_op
= OPC_DADDU
;
12070 check_insn(ctx
, ISA_MIPS3
);
12071 check_mips_64(ctx
);
12074 mips32_op
= OPC_DSUBU
;
12075 check_insn(ctx
, ISA_MIPS3
);
12076 check_mips_64(ctx
);
12080 generate_exception_end(ctx
, EXCP_RI
);
12084 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12093 int nd
= (ctx
->opcode
>> 7) & 0x1;
12094 int link
= (ctx
->opcode
>> 6) & 0x1;
12095 int ra
= (ctx
->opcode
>> 5) & 0x1;
12098 check_insn(ctx
, ISA_MIPS32
);
12107 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12112 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12113 gen_helper_do_semihosting(cpu_env
);
12115 /* XXX: not clear which exception should be raised
12116 * when in debug mode...
12118 check_insn(ctx
, ISA_MIPS32
);
12119 generate_exception_end(ctx
, EXCP_DBp
);
12123 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12126 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12129 generate_exception_end(ctx
, EXCP_BREAK
);
12132 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12135 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12138 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12140 #if defined (TARGET_MIPS64)
12142 check_insn(ctx
, ISA_MIPS3
);
12143 check_mips_64(ctx
);
12144 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12148 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12151 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12154 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12157 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12160 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12163 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12166 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12169 check_insn(ctx
, ISA_MIPS32
);
12171 case RR_RY_CNVT_ZEB
:
12172 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12174 case RR_RY_CNVT_ZEH
:
12175 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12177 case RR_RY_CNVT_SEB
:
12178 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12180 case RR_RY_CNVT_SEH
:
12181 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12183 #if defined (TARGET_MIPS64)
12184 case RR_RY_CNVT_ZEW
:
12185 check_insn(ctx
, ISA_MIPS64
);
12186 check_mips_64(ctx
);
12187 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12189 case RR_RY_CNVT_SEW
:
12190 check_insn(ctx
, ISA_MIPS64
);
12191 check_mips_64(ctx
);
12192 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12196 generate_exception_end(ctx
, EXCP_RI
);
12201 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12203 #if defined (TARGET_MIPS64)
12205 check_insn(ctx
, ISA_MIPS3
);
12206 check_mips_64(ctx
);
12207 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12210 check_insn(ctx
, ISA_MIPS3
);
12211 check_mips_64(ctx
);
12212 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12215 check_insn(ctx
, ISA_MIPS3
);
12216 check_mips_64(ctx
);
12217 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12220 check_insn(ctx
, ISA_MIPS3
);
12221 check_mips_64(ctx
);
12222 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12226 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12229 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12232 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12235 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12237 #if defined (TARGET_MIPS64)
12239 check_insn(ctx
, ISA_MIPS3
);
12240 check_mips_64(ctx
);
12241 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12244 check_insn(ctx
, ISA_MIPS3
);
12245 check_mips_64(ctx
);
12246 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12249 check_insn(ctx
, ISA_MIPS3
);
12250 check_mips_64(ctx
);
12251 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12254 check_insn(ctx
, ISA_MIPS3
);
12255 check_mips_64(ctx
);
12256 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12260 generate_exception_end(ctx
, EXCP_RI
);
12264 case M16_OPC_EXTEND
:
12265 decode_extended_mips16_opc(env
, ctx
);
12268 #if defined(TARGET_MIPS64)
12270 funct
= (ctx
->opcode
>> 8) & 0x7;
12271 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12275 generate_exception_end(ctx
, EXCP_RI
);
12282 /* microMIPS extension to MIPS32/MIPS64 */
12285 * microMIPS32/microMIPS64 major opcodes
12287 * 1. MIPS Architecture for Programmers Volume II-B:
12288 * The microMIPS32 Instruction Set (Revision 3.05)
12290 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12292 * 2. MIPS Architecture For Programmers Volume II-A:
12293 * The MIPS64 Instruction Set (Revision 3.51)
12323 POOL32S
= 0x16, /* MIPS64 */
12324 DADDIU32
= 0x17, /* MIPS64 */
12353 /* 0x29 is reserved */
12366 /* 0x31 is reserved */
12379 SD32
= 0x36, /* MIPS64 */
12380 LD32
= 0x37, /* MIPS64 */
12382 /* 0x39 is reserved */
12398 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12408 /* POOL32A encoding of minor opcode field */
12411 /* These opcodes are distinguished only by bits 9..6; those bits are
12412 * what are recorded below. */
12449 /* The following can be distinguished by their lower 6 bits. */
12459 /* POOL32AXF encoding of minor opcode field extension */
12462 * 1. MIPS Architecture for Programmers Volume II-B:
12463 * The microMIPS32 Instruction Set (Revision 3.05)
12465 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12467 * 2. MIPS Architecture for Programmers VolumeIV-e:
12468 * The MIPS DSP Application-Specific Extension
12469 * to the microMIPS32 Architecture (Revision 2.34)
12471 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12486 /* begin of microMIPS32 DSP */
12488 /* bits 13..12 for 0x01 */
12494 /* bits 13..12 for 0x2a */
12500 /* bits 13..12 for 0x32 */
12504 /* end of microMIPS32 DSP */
12506 /* bits 15..12 for 0x2c */
12523 /* bits 15..12 for 0x34 */
12531 /* bits 15..12 for 0x3c */
12533 JR
= 0x0, /* alias */
12541 /* bits 15..12 for 0x05 */
12545 /* bits 15..12 for 0x0d */
12557 /* bits 15..12 for 0x15 */
12563 /* bits 15..12 for 0x1d */
12567 /* bits 15..12 for 0x2d */
12572 /* bits 15..12 for 0x35 */
12579 /* POOL32B encoding of minor opcode field (bits 15..12) */
12595 /* POOL32C encoding of minor opcode field (bits 15..12) */
12616 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
12629 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
12642 /* POOL32F encoding of minor opcode field (bits 5..0) */
12645 /* These are the bit 7..6 values */
12654 /* These are the bit 8..6 values */
12679 MOVZ_FMT_05
= 0x05,
12713 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12720 /* POOL32Fxf encoding of minor opcode extension field */
12758 /* POOL32I encoding of minor opcode field (bits 25..21) */
12788 /* These overlap and are distinguished by bit16 of the instruction */
12797 /* POOL16A encoding of minor opcode field */
12804 /* POOL16B encoding of minor opcode field */
12811 /* POOL16C encoding of minor opcode field */
12831 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12851 /* POOL16D encoding of minor opcode field */
12858 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
               >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))

static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
*ctx
, uint32_t opc
, int reglist
,
12949 int base
, int16_t offset
)
12954 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12955 generate_exception_end(ctx
, EXCP_RI
);
12959 t0
= tcg_temp_new();
12961 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12963 t1
= tcg_const_tl(reglist
);
12964 t2
= tcg_const_i32(ctx
->mem_idx
);
12966 save_cpu_state(ctx
, 1);
12969 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12972 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12974 #ifdef TARGET_MIPS64
12976 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12979 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12985 tcg_temp_free_i32(t2
);
12989 static void gen_pool16c_insn(DisasContext
*ctx
)
12991 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12992 int rs
= mmreg(ctx
->opcode
& 0x7);
12994 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12999 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13005 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13011 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13017 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13024 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13025 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13027 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13036 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13037 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13039 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13046 int reg
= ctx
->opcode
& 0x1f;
13048 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13054 int reg
= ctx
->opcode
& 0x1f;
13055 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13056 /* Let normal delay slot handling in our caller take us
13057 to the branch target. */
13062 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13063 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13067 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13068 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13072 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13076 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13079 generate_exception_end(ctx
, EXCP_BREAK
);
13082 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13083 gen_helper_do_semihosting(cpu_env
);
13085 /* XXX: not clear which exception should be raised
13086 * when in debug mode...
13088 check_insn(ctx
, ISA_MIPS32
);
13089 generate_exception_end(ctx
, EXCP_DBp
);
13092 case JRADDIUSP
+ 0:
13093 case JRADDIUSP
+ 1:
13095 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13096 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13097 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13098 /* Let normal delay slot handling in our caller take us
13099 to the branch target. */
13103 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
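/*
 * MOVEP copies two source registers into a destination register pair in a
 * single 16-bit instruction.  The three tables above translate the packed
 * encodings into real GPR numbers, and an encoded source of 0 (i.e. $zero,
 * rs_rt_enc[0] == 0) is materialised with a move-immediate of 0 instead of
 * reading cpu_gpr[0].
 */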
13131 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13133 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13134 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13136 switch (ctx
->opcode
& 0xf) {
13138 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13141 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13145 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13146 int offset
= extract32(ctx
->opcode
, 4, 4);
13147 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13150 case R6_JRC16
: /* JRCADDIUSP */
13151 if ((ctx
->opcode
>> 4) & 1) {
13153 int imm
= extract32(ctx
->opcode
, 5, 5);
13154 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13155 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13158 int rs
= extract32(ctx
->opcode
, 5, 5);
13159 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13162 case MOVEP
... MOVEP_07
:
13163 case MOVEP_0C
... MOVEP_0F
:
13165 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13166 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13167 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13168 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13172 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13175 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13179 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13180 int offset
= extract32(ctx
->opcode
, 4, 4);
13181 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13184 case JALRC16
: /* BREAK16, SDBBP16 */
13185 switch (ctx
->opcode
& 0x3f) {
13187 case JALRC16
+ 0x20:
13189 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13194 generate_exception(ctx
, EXCP_BREAK
);
13198 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13199 gen_helper_do_semihosting(cpu_env
);
13201 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13202 generate_exception(ctx
, EXCP_RI
);
13204 generate_exception(ctx
, EXCP_DBp
);
13211 generate_exception(ctx
, EXCP_RI
);
13216 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13218 TCGv t0
= tcg_temp_new();
13219 TCGv t1
= tcg_temp_new();
13221 gen_load_gpr(t0
, base
);
13224 gen_load_gpr(t1
, index
);
13225 tcg_gen_shli_tl(t1
, t1
, 2);
13226 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13229 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13230 gen_store_gpr(t1
, rd
);
13236 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13237 int base
, int16_t offset
)
13241 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13242 generate_exception_end(ctx
, EXCP_RI
);
13246 t0
= tcg_temp_new();
13247 t1
= tcg_temp_new();
13249 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13254 generate_exception_end(ctx
, EXCP_RI
);
13257 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13258 gen_store_gpr(t1
, rd
);
13259 tcg_gen_movi_tl(t1
, 4);
13260 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13261 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13262 gen_store_gpr(t1
, rd
+1);
13265 gen_load_gpr(t1
, rd
);
13266 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13267 tcg_gen_movi_tl(t1
, 4);
13268 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13269 gen_load_gpr(t1
, rd
+1);
13270 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13272 #ifdef TARGET_MIPS64
13275 generate_exception_end(ctx
, EXCP_RI
);
13278 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13279 gen_store_gpr(t1
, rd
);
13280 tcg_gen_movi_tl(t1
, 8);
13281 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13282 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13283 gen_store_gpr(t1
, rd
+1);
13286 gen_load_gpr(t1
, rd
);
13287 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13288 tcg_gen_movi_tl(t1
, 8);
13289 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13290 gen_load_gpr(t1
, rd
+1);
13291 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
*env
, DisasContext
*ctx
, int rt
, int rs
)
13329 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13330 int minor
= (ctx
->opcode
>> 12) & 0xf;
13331 uint32_t mips32_op
;
13333 switch (extension
) {
13335 mips32_op
= OPC_TEQ
;
13338 mips32_op
= OPC_TGE
;
13341 mips32_op
= OPC_TGEU
;
13344 mips32_op
= OPC_TLT
;
13347 mips32_op
= OPC_TLTU
;
13350 mips32_op
= OPC_TNE
;
13352 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13354 #ifndef CONFIG_USER_ONLY
13357 check_cp0_enabled(ctx
);
13359 /* Treat as NOP. */
13362 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13366 check_cp0_enabled(ctx
);
13368 TCGv t0
= tcg_temp_new();
13370 gen_load_gpr(t0
, rt
);
13371 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13377 switch (minor
& 3) {
13379 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13382 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13385 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13388 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13391 goto pool32axf_invalid
;
13395 switch (minor
& 3) {
13397 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13400 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13403 goto pool32axf_invalid
;
13409 check_insn(ctx
, ISA_MIPS32R6
);
13410 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13413 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13416 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13419 mips32_op
= OPC_CLO
;
13422 mips32_op
= OPC_CLZ
;
13424 check_insn(ctx
, ISA_MIPS32
);
13425 gen_cl(ctx
, mips32_op
, rt
, rs
);
13428 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13429 gen_rdhwr(ctx
, rt
, rs
, 0);
13432 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13435 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13436 mips32_op
= OPC_MULT
;
13439 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13440 mips32_op
= OPC_MULTU
;
13443 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13444 mips32_op
= OPC_DIV
;
13447 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13448 mips32_op
= OPC_DIVU
;
13451 check_insn(ctx
, ISA_MIPS32
);
13452 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13455 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13456 mips32_op
= OPC_MADD
;
13459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13460 mips32_op
= OPC_MADDU
;
13463 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13464 mips32_op
= OPC_MSUB
;
13467 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13468 mips32_op
= OPC_MSUBU
;
13470 check_insn(ctx
, ISA_MIPS32
);
13471 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13474 goto pool32axf_invalid
;
13485 generate_exception_err(ctx
, EXCP_CpU
, 2);
13488 goto pool32axf_invalid
;
13493 case JALR
: /* JALRC */
13494 case JALR_HB
: /* JALRC_HB */
13495 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13496 /* JALRC, JALRC_HB */
13497 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13499 /* JALR, JALR_HB */
13500 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13501 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13506 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13507 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13508 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13511 goto pool32axf_invalid
;
13517 check_cp0_enabled(ctx
);
13518 check_insn(ctx
, ISA_MIPS32R2
);
13519 gen_load_srsgpr(rs
, rt
);
13522 check_cp0_enabled(ctx
);
13523 check_insn(ctx
, ISA_MIPS32R2
);
13524 gen_store_srsgpr(rs
, rt
);
13527 goto pool32axf_invalid
;
13530 #ifndef CONFIG_USER_ONLY
13534 mips32_op
= OPC_TLBP
;
13537 mips32_op
= OPC_TLBR
;
13540 mips32_op
= OPC_TLBWI
;
13543 mips32_op
= OPC_TLBWR
;
13546 mips32_op
= OPC_TLBINV
;
13549 mips32_op
= OPC_TLBINVF
;
13552 mips32_op
= OPC_WAIT
;
13555 mips32_op
= OPC_DERET
;
13558 mips32_op
= OPC_ERET
;
13560 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13563 goto pool32axf_invalid
;
13569 check_cp0_enabled(ctx
);
13571 TCGv t0
= tcg_temp_new();
13573 save_cpu_state(ctx
, 1);
13574 gen_helper_di(t0
, cpu_env
);
13575 gen_store_gpr(t0
, rs
);
13576 /* Stop translation as we may have switched the execution mode */
13577 ctx
->bstate
= BS_STOP
;
13582 check_cp0_enabled(ctx
);
13584 TCGv t0
= tcg_temp_new();
13586 save_cpu_state(ctx
, 1);
13587 gen_helper_ei(t0
, cpu_env
);
13588 gen_store_gpr(t0
, rs
);
13589 /* BS_STOP isn't sufficient, we need to ensure we break out
13590 of translated code to check for pending interrupts. */
13591 gen_save_pc(ctx
->pc
+ 4);
13592 ctx
->bstate
= BS_EXCP
;
13597 goto pool32axf_invalid
;
13604 gen_sync(extract32(ctx
->opcode
, 16, 5));
13607 generate_exception_end(ctx
, EXCP_SYSCALL
);
13610 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13611 gen_helper_do_semihosting(cpu_env
);
13613 check_insn(ctx
, ISA_MIPS32
);
13614 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13615 generate_exception_end(ctx
, EXCP_RI
);
13617 generate_exception_end(ctx
, EXCP_DBp
);
13622 goto pool32axf_invalid
;
        switch (minor & 3) {
            gen_HILO(ctx, OPC_MFHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MFLO, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTLO, minor >> 2, rs);
            goto pool32axf_invalid;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_HILO(ctx, OPC_MFHI, 0, rs);
        gen_HILO(ctx, OPC_MFLO, 0, rs);
        gen_HILO(ctx, OPC_MTHI, 0, rs);
        gen_HILO(ctx, OPC_MTLO, 0, rs);
        goto pool32axf_invalid;
        MIPS_INVAL("pool32axf");
        generate_exception_end(ctx, EXCP_RI);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */

static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc

    switch (extension) {
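    /*
     * The case selectors below pack the minor opcode together with a format
     * field taken from the extension value, e.g.
     *     FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D) == (FMT_SD_D << 8) | RSQRT_FMT
     * so a one-bit fmt distinguishes the .S and .D variants, while
     * FLOAT_2BIT_FMT leaves room for the S/D/PS formats.
     */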
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);
    /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;
    /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;
    /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;
    /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;
    /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;
    /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;
    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
    /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;
    /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
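    /*
     * All of the single-operand cases above funnel into the gen_farith()
     * call just emitted; assuming gen_farith()'s usual (ft, fs, fd, cc)
     * argument order, the -1 marks the unused ft operand, with rs as the
     * source and rt as the destination FP register field.
     */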
    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, minor2, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
        minor = ctx->opcode & 0x3f;
        minor = (ctx->opcode >> 6) & 0xf;
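    /*
     * The shifts above reflect the 32-bit microMIPS word layout: ctx->opcode
     * now holds the first halfword in bits 31..16 and the just-fetched
     * second halfword in bits 15..0, so the major opcode sits in bits 31..26,
     * the rt/rs/rd/rr register fields in bits 25..21, 20..16, 15..11 and
     * 10..6, and the low 16 bits double as the sign-extended immediate.
     */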
13965 mips32_op
= OPC_SLL
;
13968 mips32_op
= OPC_SRA
;
13971 mips32_op
= OPC_SRL
;
13974 mips32_op
= OPC_ROTR
;
13976 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13979 check_insn(ctx
, ISA_MIPS32R6
);
13980 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13983 check_insn(ctx
, ISA_MIPS32R6
);
13984 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13987 check_insn(ctx
, ISA_MIPS32R6
);
13988 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13991 goto pool32a_invalid
;
13995 minor
= (ctx
->opcode
>> 6) & 0xf;
13999 mips32_op
= OPC_ADD
;
14002 mips32_op
= OPC_ADDU
;
14005 mips32_op
= OPC_SUB
;
14008 mips32_op
= OPC_SUBU
;
14011 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14012 mips32_op
= OPC_MUL
;
14014 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14018 mips32_op
= OPC_SLLV
;
14021 mips32_op
= OPC_SRLV
;
14024 mips32_op
= OPC_SRAV
;
14027 mips32_op
= OPC_ROTRV
;
14029 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14031 /* Logical operations */
14033 mips32_op
= OPC_AND
;
14036 mips32_op
= OPC_OR
;
14039 mips32_op
= OPC_NOR
;
14042 mips32_op
= OPC_XOR
;
14044 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14046 /* Set less than */
14048 mips32_op
= OPC_SLT
;
14051 mips32_op
= OPC_SLTU
;
14053 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14056 goto pool32a_invalid
;
14060 minor
= (ctx
->opcode
>> 6) & 0xf;
14062 /* Conditional moves */
14063 case MOVN
: /* MUL */
14064 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14066 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14069 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14072 case MOVZ
: /* MUH */
14073 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14075 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14078 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14082 check_insn(ctx
, ISA_MIPS32R6
);
14083 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14086 check_insn(ctx
, ISA_MIPS32R6
);
14087 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14089 case LWXS
: /* DIV */
14090 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14092 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14095 gen_ldxs(ctx
, rs
, rt
, rd
);
14099 check_insn(ctx
, ISA_MIPS32R6
);
14100 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14103 check_insn(ctx
, ISA_MIPS32R6
);
14104 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14107 check_insn(ctx
, ISA_MIPS32R6
);
14108 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14111 goto pool32a_invalid
;
14115 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14118 check_insn(ctx
, ISA_MIPS32R6
);
14119 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14120 extract32(ctx
->opcode
, 9, 2));
14123 check_insn(ctx
, ISA_MIPS32R6
);
14124 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14125 extract32(ctx
->opcode
, 9, 2));
14128 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14131 gen_pool32axf(env
, ctx
, rt
, rs
);
14134 generate_exception_end(ctx
, EXCP_BREAK
);
14137 check_insn(ctx
, ISA_MIPS32R6
);
14138 generate_exception_end(ctx
, EXCP_RI
);
14142 MIPS_INVAL("pool32a");
14143 generate_exception_end(ctx
, EXCP_RI
);
14148 minor
= (ctx
->opcode
>> 12) & 0xf;
14151 check_cp0_enabled(ctx
);
14152 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14153 gen_cache_operation(ctx
, rt
, rs
, imm
);
14158 /* COP2: Not implemented. */
14159 generate_exception_err(ctx
, EXCP_CpU
, 2);
14161 #ifdef TARGET_MIPS64
14164 check_insn(ctx
, ISA_MIPS3
);
14165 check_mips_64(ctx
);
14170 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14172 #ifdef TARGET_MIPS64
14175 check_insn(ctx
, ISA_MIPS3
);
14176 check_mips_64(ctx
);
14181 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14184 MIPS_INVAL("pool32b");
14185 generate_exception_end(ctx
, EXCP_RI
);
14190 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14191 minor
= ctx
->opcode
& 0x3f;
14192 check_cp1_enabled(ctx
);
14195 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14196 mips32_op
= OPC_ALNV_PS
;
14199 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14200 mips32_op
= OPC_MADD_S
;
14203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14204 mips32_op
= OPC_MADD_D
;
14207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14208 mips32_op
= OPC_MADD_PS
;
14211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14212 mips32_op
= OPC_MSUB_S
;
14215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14216 mips32_op
= OPC_MSUB_D
;
14219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14220 mips32_op
= OPC_MSUB_PS
;
14223 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14224 mips32_op
= OPC_NMADD_S
;
14227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14228 mips32_op
= OPC_NMADD_D
;
14231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14232 mips32_op
= OPC_NMADD_PS
;
14235 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14236 mips32_op
= OPC_NMSUB_S
;
14239 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14240 mips32_op
= OPC_NMSUB_D
;
14243 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14244 mips32_op
= OPC_NMSUB_PS
;
14246 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14248 case CABS_COND_FMT
:
14249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14250 cond
= (ctx
->opcode
>> 6) & 0xf;
14251 cc
= (ctx
->opcode
>> 13) & 0x7;
14252 fmt
= (ctx
->opcode
>> 10) & 0x3;
14255 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14258 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14261 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14264 goto pool32f_invalid
;
14268 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14269 cond
= (ctx
->opcode
>> 6) & 0xf;
14270 cc
= (ctx
->opcode
>> 13) & 0x7;
14271 fmt
= (ctx
->opcode
>> 10) & 0x3;
14274 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14277 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14280 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14283 goto pool32f_invalid
;
14287 check_insn(ctx
, ISA_MIPS32R6
);
14288 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14291 check_insn(ctx
, ISA_MIPS32R6
);
14292 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14295 gen_pool32fxf(ctx
, rt
, rs
);
14299 switch ((ctx
->opcode
>> 6) & 0x7) {
14301 mips32_op
= OPC_PLL_PS
;
14304 mips32_op
= OPC_PLU_PS
;
14307 mips32_op
= OPC_PUL_PS
;
14310 mips32_op
= OPC_PUU_PS
;
14313 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14314 mips32_op
= OPC_CVT_PS_S
;
14316 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14319 goto pool32f_invalid
;
14323 check_insn(ctx
, ISA_MIPS32R6
);
14324 switch ((ctx
->opcode
>> 9) & 0x3) {
14326 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14329 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14332 goto pool32f_invalid
;
14337 switch ((ctx
->opcode
>> 6) & 0x7) {
14339 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14340 mips32_op
= OPC_LWXC1
;
14343 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14344 mips32_op
= OPC_SWXC1
;
14347 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14348 mips32_op
= OPC_LDXC1
;
14351 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14352 mips32_op
= OPC_SDXC1
;
14355 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14356 mips32_op
= OPC_LUXC1
;
14359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14360 mips32_op
= OPC_SUXC1
;
14362 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14365 goto pool32f_invalid
;
14369 check_insn(ctx
, ISA_MIPS32R6
);
14370 switch ((ctx
->opcode
>> 9) & 0x3) {
14372 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14375 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14378 goto pool32f_invalid
;
14383 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14384 fmt
= (ctx
->opcode
>> 9) & 0x3;
14385 switch ((ctx
->opcode
>> 6) & 0x7) {
14389 mips32_op
= OPC_RSQRT2_S
;
14392 mips32_op
= OPC_RSQRT2_D
;
14395 mips32_op
= OPC_RSQRT2_PS
;
14398 goto pool32f_invalid
;
14404 mips32_op
= OPC_RECIP2_S
;
14407 mips32_op
= OPC_RECIP2_D
;
14410 mips32_op
= OPC_RECIP2_PS
;
14413 goto pool32f_invalid
;
14417 mips32_op
= OPC_ADDR_PS
;
14420 mips32_op
= OPC_MULR_PS
;
14422 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14425 goto pool32f_invalid
;
14429 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14430 cc
= (ctx
->opcode
>> 13) & 0x7;
14431 fmt
= (ctx
->opcode
>> 9) & 0x3;
14432 switch ((ctx
->opcode
>> 6) & 0x7) {
14433 case MOVF_FMT
: /* RINT_FMT */
14434 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14438 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14441 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14444 goto pool32f_invalid
;
14450 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14453 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14457 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14460 goto pool32f_invalid
;
14464 case MOVT_FMT
: /* CLASS_FMT */
14465 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14469 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14472 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14475 goto pool32f_invalid
;
14481 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14484 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14488 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14491 goto pool32f_invalid
;
14496 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14499 goto pool32f_invalid
;
14502 #define FINSN_3ARG_SDPS(prfx) \
14503 switch ((ctx->opcode >> 8) & 0x3) { \
14505 mips32_op = OPC_##prfx##_S; \
14508 mips32_op = OPC_##prfx##_D; \
14510 case FMT_SDPS_PS: \
14512 mips32_op = OPC_##prfx##_PS; \
14515 goto pool32f_invalid; \
14518 check_insn(ctx
, ISA_MIPS32R6
);
14519 switch ((ctx
->opcode
>> 9) & 0x3) {
14521 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14524 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14527 goto pool32f_invalid
;
14531 check_insn(ctx
, ISA_MIPS32R6
);
14532 switch ((ctx
->opcode
>> 9) & 0x3) {
14534 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14537 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14540 goto pool32f_invalid
;
14544 /* regular FP ops */
14545 switch ((ctx
->opcode
>> 6) & 0x3) {
14547 FINSN_3ARG_SDPS(ADD
);
14550 FINSN_3ARG_SDPS(SUB
);
14553 FINSN_3ARG_SDPS(MUL
);
14556 fmt
= (ctx
->opcode
>> 8) & 0x3;
14558 mips32_op
= OPC_DIV_D
;
14559 } else if (fmt
== 0) {
14560 mips32_op
= OPC_DIV_S
;
14562 goto pool32f_invalid
;
14566 goto pool32f_invalid
;
14571 switch ((ctx
->opcode
>> 6) & 0x7) {
14572 case MOVN_FMT
: /* SELNEZ_FMT */
14573 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14575 switch ((ctx
->opcode
>> 9) & 0x3) {
14577 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14580 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14583 goto pool32f_invalid
;
14587 FINSN_3ARG_SDPS(MOVN
);
14591 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14592 FINSN_3ARG_SDPS(MOVN
);
14594 case MOVZ_FMT
: /* SELEQZ_FMT */
14595 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14597 switch ((ctx
->opcode
>> 9) & 0x3) {
14599 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14602 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14605 goto pool32f_invalid
;
14609 FINSN_3ARG_SDPS(MOVZ
);
14613 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14614 FINSN_3ARG_SDPS(MOVZ
);
14617 check_insn(ctx
, ISA_MIPS32R6
);
14618 switch ((ctx
->opcode
>> 9) & 0x3) {
14620 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14623 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14626 goto pool32f_invalid
;
14630 check_insn(ctx
, ISA_MIPS32R6
);
14631 switch ((ctx
->opcode
>> 9) & 0x3) {
14633 mips32_op
= OPC_MADDF_S
;
14636 mips32_op
= OPC_MADDF_D
;
14639 goto pool32f_invalid
;
14643 check_insn(ctx
, ISA_MIPS32R6
);
14644 switch ((ctx
->opcode
>> 9) & 0x3) {
14646 mips32_op
= OPC_MSUBF_S
;
14649 mips32_op
= OPC_MSUBF_D
;
14652 goto pool32f_invalid
;
14656 goto pool32f_invalid
;
14660 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14664 MIPS_INVAL("pool32f");
14665 generate_exception_end(ctx
, EXCP_RI
);
14669 generate_exception_err(ctx
, EXCP_CpU
, 1);
14673 minor
= (ctx
->opcode
>> 21) & 0x1f;
14676 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14677 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14680 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14681 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14682 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14685 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14686 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14687 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14690 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14691 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14694 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14695 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14696 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14699 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14700 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14701 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14704 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14705 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14708 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14709 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14713 case TLTI
: /* BC1EQZC */
14714 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14716 check_cp1_enabled(ctx
);
14717 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14720 mips32_op
= OPC_TLTI
;
14724 case TGEI
: /* BC1NEZC */
14725 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14727 check_cp1_enabled(ctx
);
14728 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14731 mips32_op
= OPC_TGEI
;
14736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14737 mips32_op
= OPC_TLTIU
;
14740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14741 mips32_op
= OPC_TGEIU
;
14743 case TNEI
: /* SYNCI */
14744 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14746 /* Break the TB to be able to sync copied instructions
14748 ctx
->bstate
= BS_STOP
;
14751 mips32_op
= OPC_TNEI
;
14756 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14757 mips32_op
= OPC_TEQI
;
14759 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14764 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14765 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14766 4, rs
, 0, imm
<< 1, 0);
14767 /* Compact branches don't have a delay slot, so just let
14768 the normal delay slot handling take us to the branch
14772 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14773 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14776 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14777 /* Break the TB to be able to sync copied instructions
14779 ctx
->bstate
= BS_STOP
;
14783 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14784 /* COP2: Not implemented. */
14785 generate_exception_err(ctx
, EXCP_CpU
, 2);
14788 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14789 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14792 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14793 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14796 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14797 mips32_op
= OPC_BC1FANY4
;
14800 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14801 mips32_op
= OPC_BC1TANY4
;
14804 check_insn(ctx
, ASE_MIPS3D
);
14807 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14808 check_cp1_enabled(ctx
);
14809 gen_compute_branch1(ctx
, mips32_op
,
14810 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14812 generate_exception_err(ctx
, EXCP_CpU
, 1);
14817 /* MIPS DSP: not implemented */
14820 MIPS_INVAL("pool32i");
14821 generate_exception_end(ctx
, EXCP_RI
);
14826 minor
= (ctx
->opcode
>> 12) & 0xf;
14827 offset
= sextract32(ctx
->opcode
, 0,
14828 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14832 mips32_op
= OPC_LWL
;
14835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14836 mips32_op
= OPC_SWL
;
14839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14840 mips32_op
= OPC_LWR
;
14843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14844 mips32_op
= OPC_SWR
;
14846 #if defined(TARGET_MIPS64)
14848 check_insn(ctx
, ISA_MIPS3
);
14849 check_mips_64(ctx
);
14850 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14851 mips32_op
= OPC_LDL
;
14854 check_insn(ctx
, ISA_MIPS3
);
14855 check_mips_64(ctx
);
14856 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14857 mips32_op
= OPC_SDL
;
14860 check_insn(ctx
, ISA_MIPS3
);
14861 check_mips_64(ctx
);
14862 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14863 mips32_op
= OPC_LDR
;
14866 check_insn(ctx
, ISA_MIPS3
);
14867 check_mips_64(ctx
);
14868 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14869 mips32_op
= OPC_SDR
;
14872 check_insn(ctx
, ISA_MIPS3
);
14873 check_mips_64(ctx
);
14874 mips32_op
= OPC_LWU
;
14877 check_insn(ctx
, ISA_MIPS3
);
14878 check_mips_64(ctx
);
14879 mips32_op
= OPC_LLD
;
14883 mips32_op
= OPC_LL
;
14886 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14889 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
14892 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14894 #if defined(TARGET_MIPS64)
14896 check_insn(ctx
, ISA_MIPS3
);
14897 check_mips_64(ctx
);
14898 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14903 MIPS_INVAL("pool32c ld-eva");
14904 generate_exception_end(ctx
, EXCP_RI
);
14907 check_cp0_enabled(ctx
);
14909 minor2
= (ctx
->opcode
>> 9) & 0x7;
14910 offset
= sextract32(ctx
->opcode
, 0, 9);
14913 mips32_op
= OPC_LBUE
;
14916 mips32_op
= OPC_LHUE
;
14919 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14920 mips32_op
= OPC_LWLE
;
14923 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14924 mips32_op
= OPC_LWRE
;
14927 mips32_op
= OPC_LBE
;
14930 mips32_op
= OPC_LHE
;
14933 mips32_op
= OPC_LLE
;
14936 mips32_op
= OPC_LWE
;
14942 MIPS_INVAL("pool32c st-eva");
14943 generate_exception_end(ctx
, EXCP_RI
);
14946 check_cp0_enabled(ctx
);
14948 minor2
= (ctx
->opcode
>> 9) & 0x7;
14949 offset
= sextract32(ctx
->opcode
, 0, 9);
14952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14953 mips32_op
= OPC_SWLE
;
14956 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14957 mips32_op
= OPC_SWRE
;
14960 /* Treat as no-op */
14961 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14962 /* hint codes 24-31 are reserved and signal RI */
14963 generate_exception(ctx
, EXCP_RI
);
14967 /* Treat as no-op */
14968 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14969 gen_cache_operation(ctx
, rt
, rs
, offset
);
14973 mips32_op
= OPC_SBE
;
14976 mips32_op
= OPC_SHE
;
14979 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
14982 mips32_op
= OPC_SWE
;
14987 /* Treat as no-op */
14988 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14989 /* hint codes 24-31 are reserved and signal RI */
14990 generate_exception(ctx
, EXCP_RI
);
14994 MIPS_INVAL("pool32c");
14995 generate_exception_end(ctx
, EXCP_RI
);
14999 case ADDI32
: /* AUI, LUI */
15000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15002 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15005 mips32_op
= OPC_ADDI
;
15010 mips32_op
= OPC_ADDIU
;
15012 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15015 /* Logical operations */
15017 mips32_op
= OPC_ORI
;
15020 mips32_op
= OPC_XORI
;
15023 mips32_op
= OPC_ANDI
;
15025 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15028 /* Set less than immediate */
15030 mips32_op
= OPC_SLTI
;
15033 mips32_op
= OPC_SLTIU
;
15035 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15038 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15039 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15040 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15041 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15043 case JALS32
: /* BOVC, BEQC, BEQZALC */
15044 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15047 mips32_op
= OPC_BOVC
;
15048 } else if (rs
< rt
&& rs
== 0) {
15050 mips32_op
= OPC_BEQZALC
;
15053 mips32_op
= OPC_BEQC
;
15055 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15058 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15059 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15060 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15063 case BEQ32
: /* BC */
15064 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15066 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15067 sextract32(ctx
->opcode
<< 1, 0, 27));
15070 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15073 case BNE32
: /* BALC */
15074 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15076 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15077 sextract32(ctx
->opcode
<< 1, 0, 27));
15080 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15083 case J32
: /* BGTZC, BLTZC, BLTC */
15084 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15085 if (rs
== 0 && rt
!= 0) {
15087 mips32_op
= OPC_BGTZC
;
15088 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15090 mips32_op
= OPC_BLTZC
;
15093 mips32_op
= OPC_BLTC
;
15095 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15098 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15099 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15102 case JAL32
: /* BLEZC, BGEZC, BGEC */
15103 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15104 if (rs
== 0 && rt
!= 0) {
15106 mips32_op
= OPC_BLEZC
;
15107 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15109 mips32_op
= OPC_BGEZC
;
15112 mips32_op
= OPC_BGEC
;
15114 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15117 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15118 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15119 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15122 /* Floating point (COP1) */
15124 mips32_op
= OPC_LWC1
;
15127 mips32_op
= OPC_LDC1
;
15130 mips32_op
= OPC_SWC1
;
15133 mips32_op
= OPC_SDC1
;
15135 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15137 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15138 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15139 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15140 switch ((ctx
->opcode
>> 16) & 0x1f) {
15141 case ADDIUPC_00
... ADDIUPC_07
:
15142 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
15145 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
15148 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
15150 case LWPC_08
... LWPC_0F
:
15151 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
15154 generate_exception(ctx
, EXCP_RI
);
15159 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15160 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15162 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15165 case BNVC
: /* BNEC, BNEZALC */
15166 check_insn(ctx
, ISA_MIPS32R6
);
15169 mips32_op
= OPC_BNVC
;
15170 } else if (rs
< rt
&& rs
== 0) {
15172 mips32_op
= OPC_BNEZALC
;
15175 mips32_op
= OPC_BNEC
;
15177 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15179 case R6_BNEZC
: /* JIALC */
15180 check_insn(ctx
, ISA_MIPS32R6
);
15183 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15184 sextract32(ctx
->opcode
<< 1, 0, 22));
15187 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15190 case R6_BEQZC
: /* JIC */
15191 check_insn(ctx
, ISA_MIPS32R6
);
15194 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15195 sextract32(ctx
->opcode
<< 1, 0, 22));
15198 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15201 case BLEZALC
: /* BGEZALC, BGEUC */
15202 check_insn(ctx
, ISA_MIPS32R6
);
15203 if (rs
== 0 && rt
!= 0) {
15205 mips32_op
= OPC_BLEZALC
;
15206 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15208 mips32_op
= OPC_BGEZALC
;
15211 mips32_op
= OPC_BGEUC
;
15213 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15215 case BGTZALC
: /* BLTZALC, BLTUC */
15216 check_insn(ctx
, ISA_MIPS32R6
);
15217 if (rs
== 0 && rt
!= 0) {
15219 mips32_op
= OPC_BGTZALC
;
15220 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15222 mips32_op
= OPC_BLTZALC
;
15225 mips32_op
= OPC_BLTUC
;
15227 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15229 /* Loads and stores */
15231 mips32_op
= OPC_LB
;
15234 mips32_op
= OPC_LBU
;
15237 mips32_op
= OPC_LH
;
15240 mips32_op
= OPC_LHU
;
15243 mips32_op
= OPC_LW
;
15245 #ifdef TARGET_MIPS64
15247 check_insn(ctx
, ISA_MIPS3
);
15248 check_mips_64(ctx
);
15249 mips32_op
= OPC_LD
;
15252 check_insn(ctx
, ISA_MIPS3
);
15253 check_mips_64(ctx
);
15254 mips32_op
= OPC_SD
;
15258 mips32_op
= OPC_SB
;
15261 mips32_op
= OPC_SH
;
15264 mips32_op
= OPC_SW
;
15267 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15270 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15273 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
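    /*
     * Only the low three bits of the 6-bit major opcode (op & 0x7) are
     * needed here: in microMIPS they are enough to tell the 32-bit pools
     * apart from the 16-bit ones, so an instruction of the wrong width in a
     * size-restricted delay slot (MIPS_HFLAG_BDS16 or MIPS_HFLAG_BDS32) is
     * rejected with a Reserved Instruction exception before any further
     * decoding.
     */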
15325 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15326 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15327 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15330 switch (ctx
->opcode
& 0x1) {
15338 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15339 /* In the Release 6 the register number location in
15340 * the instruction encoding has changed.
15342 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15344 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15350 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15351 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15352 int amount
= (ctx
->opcode
>> 1) & 0x7;
15354 amount
= amount
== 0 ? 8 : amount
;
15356 switch (ctx
->opcode
& 0x1) {
15365 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15369 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15370 gen_pool16c_r6_insn(ctx
);
15372 gen_pool16c_insn(ctx
);
15377 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15378 int rb
= 28; /* GP */
15379 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15381 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15386 if (ctx
->opcode
& 1) {
15387 generate_exception_end(ctx
, EXCP_RI
);
15390 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15391 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15392 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15393 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15398 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15399 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15400 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15401 offset
= (offset
== 0xf ? -1 : offset
);
15403 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15408 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15409 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15410 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15412 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15417 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15418 int rb
= 29; /* SP */
15419 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15421 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15426 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15427 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15428 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15430 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15435 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15436 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15437 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15439 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15444 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15445 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15446 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15448 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15453 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15454 int rb
= 29; /* SP */
15455 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15457 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15462 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15463 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15464 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15466 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15471 int rd
= uMIPS_RD5(ctx
->opcode
);
15472 int rs
= uMIPS_RS5(ctx
->opcode
);
15474 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15481 switch (ctx
->opcode
& 0x1) {
15491 switch (ctx
->opcode
& 0x1) {
15496 gen_addiur1sp(ctx
);
15500 case B16
: /* BC16 */
15501 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15502 sextract32(ctx
->opcode
, 0, 10) << 1,
15503 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15505 case BNEZ16
: /* BNEZC16 */
15506 case BEQZ16
: /* BEQZC16 */
15507 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15508 mmreg(uMIPS_RD(ctx
->opcode
)),
15509 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15510 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15515 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15516 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15518 imm
= (imm
== 0x7f ? -1 : imm
);
15519 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15525 generate_exception_end(ctx
, EXCP_RI
);
15528 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
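/*
 * The address computation above implements the DSP ASE indexed loads:
 * "base" and "offset" are both GPR numbers, so the effective address is
 * GPR[base] + GPR[offset], with either term dropped when the corresponding
 * register is $0.  The MO_UB/MO_TESW/MO_TESL/MO_TEQ accesses then cover the
 * byte, halfword, word and (64-bit targets only) doubleword forms.
 */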
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
        /* Treat as NOP. */
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
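    /*
     * Both source registers are read into temporaries up front; the switch
     * below dispatches on the (op1, op2) pair, with most cases reducing to a
     * single DSP helper call that writes cpu_gpr[ret].
     */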
15601 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15602 case OPC_MULT_G_2E
:
15606 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15608 case OPC_ADDUH_R_QB
:
15609 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15612 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15614 case OPC_ADDQH_R_PH
:
15615 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15618 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15620 case OPC_ADDQH_R_W
:
15621 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15624 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15626 case OPC_SUBUH_R_QB
:
15627 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15630 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15632 case OPC_SUBQH_R_PH
:
15633 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15636 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15638 case OPC_SUBQH_R_W
:
15639 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15643 case OPC_ABSQ_S_PH_DSP
:
15645 case OPC_ABSQ_S_QB
:
15647 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15649 case OPC_ABSQ_S_PH
:
15651 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15655 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15657 case OPC_PRECEQ_W_PHL
:
15659 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15660 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15662 case OPC_PRECEQ_W_PHR
:
15664 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15665 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15666 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15668 case OPC_PRECEQU_PH_QBL
:
15670 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15672 case OPC_PRECEQU_PH_QBR
:
15674 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15676 case OPC_PRECEQU_PH_QBLA
:
15678 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15680 case OPC_PRECEQU_PH_QBRA
:
15682 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15684 case OPC_PRECEU_PH_QBL
:
15686 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15688 case OPC_PRECEU_PH_QBR
:
15690 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15692 case OPC_PRECEU_PH_QBLA
:
15694 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15696 case OPC_PRECEU_PH_QBRA
:
15698 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15702 case OPC_ADDU_QB_DSP
:
15706 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15708 case OPC_ADDQ_S_PH
:
15710 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15714 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15718 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15720 case OPC_ADDU_S_QB
:
15722 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15726 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15728 case OPC_ADDU_S_PH
:
15730 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15734 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15736 case OPC_SUBQ_S_PH
:
15738 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15742 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15746 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15748 case OPC_SUBU_S_QB
:
15750 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15754 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15756 case OPC_SUBU_S_PH
:
15758 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15762 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15766 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15770 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15772 case OPC_RADDU_W_QB
:
15774 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15778 case OPC_CMPU_EQ_QB_DSP
:
15780 case OPC_PRECR_QB_PH
:
15782 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15784 case OPC_PRECRQ_QB_PH
:
15786 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15788 case OPC_PRECR_SRA_PH_W
:
15791 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15792 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15794 tcg_temp_free_i32(sa_t
);
15797 case OPC_PRECR_SRA_R_PH_W
:
15800 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15801 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15803 tcg_temp_free_i32(sa_t
);
15806 case OPC_PRECRQ_PH_W
:
15808 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15810 case OPC_PRECRQ_RS_PH_W
:
15812 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15814 case OPC_PRECRQU_S_QB_PH
:
15816 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15820 #ifdef TARGET_MIPS64
15821 case OPC_ABSQ_S_QH_DSP
:
15823 case OPC_PRECEQ_L_PWL
:
15825 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15827 case OPC_PRECEQ_L_PWR
:
15829 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15831 case OPC_PRECEQ_PW_QHL
:
15833 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15835 case OPC_PRECEQ_PW_QHR
:
15837 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15839 case OPC_PRECEQ_PW_QHLA
:
15841 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15843 case OPC_PRECEQ_PW_QHRA
:
15845 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15847 case OPC_PRECEQU_QH_OBL
:
15849 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15851 case OPC_PRECEQU_QH_OBR
:
15853 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15855 case OPC_PRECEQU_QH_OBLA
:
15857 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15859 case OPC_PRECEQU_QH_OBRA
:
15861 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15863 case OPC_PRECEU_QH_OBL
:
15865 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15867 case OPC_PRECEU_QH_OBR
:
15869 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15871 case OPC_PRECEU_QH_OBLA
:
15873 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15875 case OPC_PRECEU_QH_OBRA
:
15877 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15879 case OPC_ABSQ_S_OB
:
15881 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15883 case OPC_ABSQ_S_PW
:
15885 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15887 case OPC_ABSQ_S_QH
:
15889 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15893 case OPC_ADDU_OB_DSP
:
15895 case OPC_RADDU_L_OB
:
15897 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15901 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15903 case OPC_SUBQ_S_PW
:
15905 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15909 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15911 case OPC_SUBQ_S_QH
:
15913 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15917 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15919 case OPC_SUBU_S_OB
:
15921 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15925 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15927 case OPC_SUBU_S_QH
:
15929 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15933 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15935 case OPC_SUBUH_R_OB
:
15937 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15941 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15943 case OPC_ADDQ_S_PW
:
15945 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15949 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15951 case OPC_ADDQ_S_QH
:
15953 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15957 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15959 case OPC_ADDU_S_OB
:
15961 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15965 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15967 case OPC_ADDU_S_QH
:
15969 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15973 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15975 case OPC_ADDUH_R_OB
:
15977 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15981 case OPC_CMPU_EQ_OB_DSP
:
15983 case OPC_PRECR_OB_QH
:
15985 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15987 case OPC_PRECR_SRA_QH_PW
:
15990 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15991 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15992 tcg_temp_free_i32(ret_t
);
15995 case OPC_PRECR_SRA_R_QH_PW
:
15998 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15999 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
16000 tcg_temp_free_i32(sa_v
);
16003 case OPC_PRECRQ_OB_QH
:
16005 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16007 case OPC_PRECRQ_PW_L
:
16009 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
16011 case OPC_PRECRQ_QH_PW
:
16013 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16015 case OPC_PRECRQ_RS_QH_PW
:
16017 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16019 case OPC_PRECRQU_S_OB_QH
:
16021 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16028 tcg_temp_free(v1_t
);
16029 tcg_temp_free(v2_t
);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
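    /*
     * t0 holds the immediate shift amount (the raw v1 field) while v1_t
     * holds GPR[v1]: the immediate SHLL/SHRA/SHRL forms below pass t0 to
     * the helpers, and the register-variable ...V forms pass v1_t instead.
     */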
16054 case OPC_SHLL_QB_DSP
:
16056 op2
= MASK_SHLL_QB(ctx
->opcode
);
16060 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16064 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16068 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16072 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16074 case OPC_SHLL_S_PH
:
16076 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16078 case OPC_SHLLV_S_PH
:
16080 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16084 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16086 case OPC_SHLLV_S_W
:
16088 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16092 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16096 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16100 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16104 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16108 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16110 case OPC_SHRA_R_QB
:
16112 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16116 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16118 case OPC_SHRAV_R_QB
:
16120 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16124 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16126 case OPC_SHRA_R_PH
:
16128 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16132 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16134 case OPC_SHRAV_R_PH
:
16136 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16140 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16142 case OPC_SHRAV_R_W
:
16144 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16146 default: /* Invalid */
16147 MIPS_INVAL("MASK SHLL.QB");
16148 generate_exception_end(ctx
, EXCP_RI
);
16153 #ifdef TARGET_MIPS64
16154 case OPC_SHLL_OB_DSP
:
16155 op2
= MASK_SHLL_OB(ctx
->opcode
);
16159 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16163 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16165 case OPC_SHLL_S_PW
:
16167 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16169 case OPC_SHLLV_S_PW
:
16171 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16175 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16179 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16183 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16187 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16189 case OPC_SHLL_S_QH
:
16191 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16193 case OPC_SHLLV_S_QH
:
16195 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16199 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16203 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16205 case OPC_SHRA_R_OB
:
16207 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16209 case OPC_SHRAV_R_OB
:
16211 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16215 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16219 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16221 case OPC_SHRA_R_PW
:
16223 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16225 case OPC_SHRAV_R_PW
:
16227 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16231 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16235 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16237 case OPC_SHRA_R_QH
:
16239 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16241 case OPC_SHRAV_R_QH
:
16243 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16247 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16251 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16255 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16259 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16261 default: /* Invalid */
16262 MIPS_INVAL("MASK SHLL.OB");
16263 generate_exception_end(ctx
, EXCP_RI
);
16271 tcg_temp_free(v1_t
);
16272 tcg_temp_free(v2_t
);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
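    /*
     * For the multiply/accumulate groups the i32 temporary t0 carries the
     * destination or accumulator index into the helpers (the 64-bit
     * DPAQ.W.QH group later narrows it to ret & 0x03); the plain multiplies
     * instead write cpu_gpr[ret] directly.
     */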
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
16302 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16305 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16308 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16310 case OPC_MULQ_RS_W
:
16311 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16315 case OPC_DPA_W_PH_DSP
:
16317 case OPC_DPAU_H_QBL
:
16319 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16321 case OPC_DPAU_H_QBR
:
16323 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16325 case OPC_DPSU_H_QBL
:
16327 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16329 case OPC_DPSU_H_QBR
:
16331 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16335 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16337 case OPC_DPAX_W_PH
:
16339 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16341 case OPC_DPAQ_S_W_PH
:
16343 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16345 case OPC_DPAQX_S_W_PH
:
16347 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16349 case OPC_DPAQX_SA_W_PH
:
16351 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16355 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16357 case OPC_DPSX_W_PH
:
16359 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16361 case OPC_DPSQ_S_W_PH
:
16363 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16365 case OPC_DPSQX_S_W_PH
:
16367 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16369 case OPC_DPSQX_SA_W_PH
:
16371 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16373 case OPC_MULSAQ_S_W_PH
:
16375 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16377 case OPC_DPAQ_SA_L_W
:
16379 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16381 case OPC_DPSQ_SA_L_W
:
16383 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16385 case OPC_MAQ_S_W_PHL
:
16387 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16389 case OPC_MAQ_S_W_PHR
:
16391 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16393 case OPC_MAQ_SA_W_PHL
:
16395 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16397 case OPC_MAQ_SA_W_PHR
:
16399 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16401 case OPC_MULSA_W_PH
:
16403 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16407 #ifdef TARGET_MIPS64
16408 case OPC_DPAQ_W_QH_DSP
:
16410 int ac
= ret
& 0x03;
16411 tcg_gen_movi_i32(t0
, ac
);
16416 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16420 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16424 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16428 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16432 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16434 case OPC_DPAQ_S_W_QH
:
16436 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16438 case OPC_DPAQ_SA_L_PW
:
16440 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16442 case OPC_DPAU_H_OBL
:
16444 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16446 case OPC_DPAU_H_OBR
:
16448 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16452 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16454 case OPC_DPSQ_S_W_QH
:
16456 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16458 case OPC_DPSQ_SA_L_PW
:
16460 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16462 case OPC_DPSU_H_OBL
:
16464 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16466 case OPC_DPSU_H_OBR
:
16468 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16470 case OPC_MAQ_S_L_PWL
:
16472 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16474 case OPC_MAQ_S_L_PWR
:
16476 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16478 case OPC_MAQ_S_W_QHLL
:
16480 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16482 case OPC_MAQ_SA_W_QHLL
:
16484 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16486 case OPC_MAQ_S_W_QHLR
:
16488 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16490 case OPC_MAQ_SA_W_QHLR
:
16492 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16494 case OPC_MAQ_S_W_QHRL
:
16496 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16498 case OPC_MAQ_SA_W_QHRL
:
16500 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16502 case OPC_MAQ_S_W_QHRR
:
16504 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16506 case OPC_MAQ_SA_W_QHRR
:
16508 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16510 case OPC_MULSAQ_S_L_PW
:
16512 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16514 case OPC_MULSAQ_S_W_QH
:
16516 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16522 case OPC_ADDU_QB_DSP
:
16524 case OPC_MULEU_S_PH_QBL
:
16526 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16528 case OPC_MULEU_S_PH_QBR
:
16530 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16532 case OPC_MULQ_RS_PH
:
16534 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16536 case OPC_MULEQ_S_W_PHL
:
16538 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16540 case OPC_MULEQ_S_W_PHR
:
16542 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16544 case OPC_MULQ_S_PH
:
16546 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16550 #ifdef TARGET_MIPS64
16551 case OPC_ADDU_OB_DSP
:
16553 case OPC_MULEQ_S_PW_QHL
:
16555 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16557 case OPC_MULEQ_S_PW_QHR
:
16559 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16561 case OPC_MULEU_S_QH_OBL
:
16563 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16565 case OPC_MULEU_S_QH_OBR
:
16567 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16569 case OPC_MULQ_RS_QH
:
16571 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16578 tcg_temp_free_i32(t0
);
16579 tcg_temp_free(v1_t
);
16580 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
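    /*
     * Several cases below build their result purely from the immediate
     * encoded in bits 25..16 of the opcode: an 8-bit immediate of 0x12, for
     * example, is replicated into every byte lane, giving 0x12121212 on
     * 32-bit targets.
     */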
16600 case OPC_ABSQ_S_PH_DSP
:
16604 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16609 target_long result
;
16610 imm
= (ctx
->opcode
>> 16) & 0xFF;
16611 result
= (uint32_t)imm
<< 24 |
16612 (uint32_t)imm
<< 16 |
16613 (uint32_t)imm
<< 8 |
16615 result
= (int32_t)result
;
16616 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16621 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16622 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16623 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16624 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16625 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16626 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16631 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16632 imm
= (int16_t)(imm
<< 6) >> 6;
16633 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16634 (target_long
)((int32_t)imm
<< 16 | \
16640 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16641 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16642 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16643 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16647 #ifdef TARGET_MIPS64
16648 case OPC_ABSQ_S_QH_DSP
:
16655 imm
= (ctx
->opcode
>> 16) & 0xFF;
16656 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16657 temp
= (temp
<< 16) | temp
;
16658 temp
= (temp
<< 32) | temp
;
16659 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16667 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16668 imm
= (int16_t)(imm
<< 6) >> 6;
16669 temp
= ((target_long
)imm
<< 32) \
16670 | ((target_long
)imm
& 0xFFFFFFFF);
16671 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16679 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16680 imm
= (int16_t)(imm
<< 6) >> 6;
16682 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16683 ((uint64_t)(uint16_t)imm
<< 32) |
16684 ((uint64_t)(uint16_t)imm
<< 16) |
16685 (uint64_t)(uint16_t)imm
;
16686 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16691 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16692 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16693 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16694 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16695 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16696 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16697 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16701 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16702 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16703 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16707 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16708 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16709 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16710 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16711 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16718 tcg_temp_free(val_t
);
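/*
 * DSP ASE compare/pick instructions. The plain CMPU/CMP variants only set
 * condition bits (their helpers take cpu_env and no destination register),
 * the CMPGU variants write a GPR, and the CMPGDU variants do both, which is
 * why they also clear and refill bits 24..27 of cpu_dspctrl inline.
 */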
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    case OPC_CMPU_EQ_QB_DSP:
    case OPC_CMPU_EQ_QB:
        gen_helper_cmpu_eq_qb(v1_t, v2_t, cpu_env);
    case OPC_CMPU_LT_QB:
        gen_helper_cmpu_lt_qb(v1_t, v2_t, cpu_env);
    case OPC_CMPU_LE_QB:
        gen_helper_cmpu_le_qb(v1_t, v2_t, cpu_env);
    case OPC_CMPGU_EQ_QB:
        gen_helper_cmpgu_eq_qb(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPGU_LT_QB:
        gen_helper_cmpgu_lt_qb(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPGU_LE_QB:
        gen_helper_cmpgu_le_qb(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPGDU_EQ_QB:
        gen_helper_cmpgu_eq_qb(t1, v1_t, v2_t);
        tcg_gen_mov_tl(cpu_gpr[ret], t1);
        tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
        tcg_gen_shli_tl(t1, t1, 24);
        tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
    case OPC_CMPGDU_LT_QB:
        gen_helper_cmpgu_lt_qb(t1, v1_t, v2_t);
        tcg_gen_mov_tl(cpu_gpr[ret], t1);
        tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
        tcg_gen_shli_tl(t1, t1, 24);
        tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
    case OPC_CMPGDU_LE_QB:
        gen_helper_cmpgu_le_qb(t1, v1_t, v2_t);
        tcg_gen_mov_tl(cpu_gpr[ret], t1);
        tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
        tcg_gen_shli_tl(t1, t1, 24);
        tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
    case OPC_CMP_EQ_PH:
        gen_helper_cmp_eq_ph(v1_t, v2_t, cpu_env);
    case OPC_CMP_LT_PH:
        gen_helper_cmp_lt_ph(v1_t, v2_t, cpu_env);
    case OPC_CMP_LE_PH:
        gen_helper_cmp_le_ph(v1_t, v2_t, cpu_env);
        gen_helper_pick_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        gen_helper_pick_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    case OPC_PACKRL_PH:
        gen_helper_packrl_ph(cpu_gpr[ret], v1_t, v2_t);
#ifdef TARGET_MIPS64
    case OPC_CMPU_EQ_OB_DSP:
    case OPC_CMP_EQ_PW:
        gen_helper_cmp_eq_pw(v1_t, v2_t, cpu_env);
    case OPC_CMP_LT_PW:
        gen_helper_cmp_lt_pw(v1_t, v2_t, cpu_env);
    case OPC_CMP_LE_PW:
        gen_helper_cmp_le_pw(v1_t, v2_t, cpu_env);
    case OPC_CMP_EQ_QH:
        gen_helper_cmp_eq_qh(v1_t, v2_t, cpu_env);
    case OPC_CMP_LT_QH:
        gen_helper_cmp_lt_qh(v1_t, v2_t, cpu_env);
    case OPC_CMP_LE_QH:
        gen_helper_cmp_le_qh(v1_t, v2_t, cpu_env);
    case OPC_CMPGDU_EQ_OB:
        gen_helper_cmpgdu_eq_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    case OPC_CMPGDU_LT_OB:
        gen_helper_cmpgdu_lt_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    case OPC_CMPGDU_LE_OB:
        gen_helper_cmpgdu_le_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
    case OPC_CMPGU_EQ_OB:
        gen_helper_cmpgu_eq_ob(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPGU_LT_OB:
        gen_helper_cmpgu_lt_ob(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPGU_LE_OB:
        gen_helper_cmpgu_le_ob(cpu_gpr[ret], v1_t, v2_t);
    case OPC_CMPU_EQ_OB:
        gen_helper_cmpu_eq_ob(v1_t, v2_t, cpu_env);
    case OPC_CMPU_LT_OB:
        gen_helper_cmpu_lt_ob(v1_t, v2_t, cpu_env);
    case OPC_CMPU_LE_OB:
        gen_helper_cmpu_le_ob(v1_t, v2_t, cpu_env);
    case OPC_PACKRL_PW:
        gen_helper_packrl_pw(cpu_gpr[ret], v1_t, v2_t);
        gen_helper_pick_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        gen_helper_pick_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
        gen_helper_pick_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);

    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
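/*
 * APPEND/PREPEND/BALIGN (and the 64-bit D* forms): these are implemented
 * directly with TCG shift/or/deposit ops rather than helpers; sa is the
 * shift or byte-alignment amount taken from the instruction word.
 */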
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
    /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 64 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
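/*
 * DSP ASE accumulator access instructions (EXTR/EXTP/SHILO/MTHLIP/WRDSP/
 * RDDSP and their 64-bit counterparts). The immediate forms move the
 * constants into temporaries first because the helpers take TCG values.
 */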
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    case OPC_EXTR_W_DSP:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extr_w(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_EXTR_RS_W:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_EXTRV_S_H:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_EXTRV_R_W:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_EXTRV_RS_W:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extp(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extp(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_extpdp(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_extpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
        imm = (ctx->opcode >> 20) & 0x3F;
        tcg_gen_movi_tl(t0, ret);
        tcg_gen_movi_tl(t1, imm);
        gen_helper_shilo(t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, ret);
        gen_helper_shilo(t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, ret);
        gen_helper_mthlip(t0, v1_t, cpu_env);
        imm = (ctx->opcode >> 11) & 0x3FF;
        tcg_gen_movi_tl(t0, imm);
        gen_helper_wrdsp(v1_t, t0, cpu_env);
        imm = (ctx->opcode >> 16) & 0x03FF;
        tcg_gen_movi_tl(t0, imm);
        gen_helper_rddsp(cpu_gpr[ret], t0, cpu_env);
#ifdef TARGET_MIPS64
    case OPC_DEXTR_W_DSP:
        tcg_gen_movi_tl(t0, ret);
        gen_helper_dmthlip(v1_t, t0, cpu_env);
        int shift = (ctx->opcode >> 19) & 0x7F;
        int ac = (ctx->opcode >> 11) & 0x03;
        tcg_gen_movi_tl(t0, shift);
        tcg_gen_movi_tl(t1, ac);
        gen_helper_dshilo(t0, t1, cpu_env);
        int ac = (ctx->opcode >> 11) & 0x03;
        tcg_gen_movi_tl(t0, ac);
        gen_helper_dshilo(v1_t, t0, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTR_R_L:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTR_RS_L:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTR_R_W:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTR_RS_W:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTR_S_H:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
    case OPC_DEXTRV_S_H:
        tcg_gen_movi_tl(t0, v2);
        tcg_gen_movi_tl(t1, v1);
        gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_DEXTRV_R_L:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_DEXTRV_RS_L:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_DEXTRV_R_W:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
    case OPC_DEXTRV_RS_W:
        tcg_gen_movi_tl(t0, v2);
        gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);

    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
/* End MIPSDSP functions. */
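/*
 * Decoders for the SPECIAL/SPECIAL2/SPECIAL3 major opcodes follow. The R6
 * and legacy variants are kept separate because Release 6 reuses several
 * pre-R6 encodings; decode_opc_special() and decode_opc_special3() pick the
 * right one based on ctx->insn_flags.
 */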
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
        gen_cond_move(ctx, op1, rd, rs, rt);
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
                generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_MOVN: /* Conditional move */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
    case OPC_MFHI: /* Move from HI/LO */
        gen_HILO(ctx, op1, rs & 3, rd);
    case OPC_MTLO: /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
        check_insn(ctx, INSN_VR54XX);
        op1 = MASK_MUL_VR54XX(ctx->opcode);
        gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_muldiv(ctx, op1, 0, rs, rt);
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
    default: /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
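/*
 * Top-level SPECIAL decoder: handles encodings common to all ISA revisions
 * and defers revision-specific ones to decode_opc_special_r6() or
 * decode_opc_special_legacy().
 */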
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL: /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADD ... OPC_SUBU:
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_SLLV: /* Shifts */
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SLT: /* Set on less than */
        gen_slt(ctx, op1, rd, rs, rt);
    case OPC_AND: /* Logic */
        gen_logic(ctx, op1, rd, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
    case OPC_TGE ... OPC_TEQ: /* Traps */
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_0e0i(pmon, sa);
        generate_exception_end(ctx, EXCP_SYSCALL);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS2);
        gen_sync(extract32(ctx->opcode, 6, 5));
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DADD ... OPC_DSUBU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
            decode_opc_special_legacy(env, ctx);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        /* Treat as NOP. */
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        /* Treat as NOP. */
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN ... OPC_ALIGN_END:
            gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
            gen_bitswap(ctx, op2, rd, rt);
#if defined(TARGET_MIPS64)
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        check_mips_64(ctx);
        /* Treat as NOP. */
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN ... OPC_DALIGN_END:
            gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
            gen_bitswap(ctx, op2, rd, rt);
    default: /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    case OPC_DIV_G_2E ... OPC_DIVU_G_2E:
    case OPC_MOD_G_2E ... OPC_MODU_G_2E:
    case OPC_MULT_G_2E ... OPC_MULTU_G_2E:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
        if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
            op2 = MASK_ADDUH_QB(ctx->opcode);
            case OPC_ADDUH_R_QB:
            case OPC_ADDQH_R_PH:
            case OPC_ADDQH_R_W:
            case OPC_SUBUH_R_QB:
            case OPC_SUBQH_R_PH:
            case OPC_SUBQH_R_W:
                gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            case OPC_MULQ_RS_W:
                gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
                MIPS_INVAL("MASK ADDUH.QB");
                generate_exception_end(ctx, EXCP_RI);
        } else if (ctx->insn_flags & INSN_LOONGSON2E) {
            gen_loongson_integer(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_LX(ctx->opcode);
#if defined(TARGET_MIPS64)
            gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK LX");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ABSQ_S_PH_DSP:
        op2 = MASK_ABSQ_S_PH(ctx->opcode);
        case OPC_ABSQ_S_QB:
        case OPC_ABSQ_S_PH:
        case OPC_PRECEQ_W_PHL:
        case OPC_PRECEQ_W_PHR:
        case OPC_PRECEQU_PH_QBL:
        case OPC_PRECEQU_PH_QBR:
        case OPC_PRECEQU_PH_QBLA:
        case OPC_PRECEQU_PH_QBRA:
        case OPC_PRECEU_PH_QBL:
        case OPC_PRECEU_PH_QBR:
        case OPC_PRECEU_PH_QBLA:
        case OPC_PRECEU_PH_QBRA:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            MIPS_INVAL("MASK ABSQ_S.PH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_QB_DSP:
        op2 = MASK_ADDU_QB(ctx->opcode);
        case OPC_ADDQ_S_PH:
        case OPC_ADDU_S_QB:
        case OPC_ADDU_S_PH:
        case OPC_SUBQ_S_PH:
        case OPC_SUBU_S_QB:
        case OPC_SUBU_S_PH:
        case OPC_RADDU_W_QB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEU_S_PH_QBL:
        case OPC_MULEU_S_PH_QBR:
        case OPC_MULQ_RS_PH:
        case OPC_MULEQ_S_W_PHL:
        case OPC_MULEQ_S_W_PHR:
        case OPC_MULQ_S_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_QB_DSP:
        op2 = MASK_CMPU_EQ_QB(ctx->opcode);
        case OPC_PRECR_SRA_PH_W:
        case OPC_PRECR_SRA_R_PH_W:
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);

            gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        case OPC_EXTR_RS_W:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
    case OPC_DDIV_G_2E ... OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E ... OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E ... OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA:
        case OPC_ABSQ_S_OB:
        case OPC_ABSQ_S_PW:
        case OPC_ABSQ_S_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
        default: /* Invalid */
            MIPS_INVAL("MASK ABSQ_S.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_OB_DSP:
        op2 = MASK_ADDU_OB(ctx->opcode);
        case OPC_RADDU_L_OB:
        case OPC_SUBQ_S_PW:
        case OPC_SUBQ_S_QH:
        case OPC_SUBU_S_OB:
        case OPC_SUBU_S_QH:
        case OPC_SUBUH_R_OB:
        case OPC_ADDQ_S_PW:
        case OPC_ADDQ_S_QH:
        case OPC_ADDU_S_OB:
        case OPC_ADDU_S_QH:
        case OPC_ADDUH_R_OB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEQ_S_PW_QHL:
        case OPC_MULEQ_S_PW_QHR:
        case OPC_MULEU_S_QH_OBL:
        case OPC_MULEU_S_QH_OBR:
        case OPC_MULQ_RS_QH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK ADDU.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_OB_DSP:
        op2 = MASK_CMPU_EQ_OB(ctx->opcode);
        case OPC_PRECR_SRA_QH_PW:
        case OPC_PRECR_SRA_R_QH_PW:
            /* Return value is rt. */
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_OB_QH:
        case OPC_PRECRQ_OB_QH:
        case OPC_PRECRQ_PW_L:
        case OPC_PRECRQ_QH_PW:
        case OPC_PRECRQ_RS_QH_PW:
        case OPC_PRECRQU_S_OB_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_OB:
        case OPC_CMPU_LT_OB:
        case OPC_CMPU_LE_OB:
        case OPC_CMP_EQ_QH:
        case OPC_CMP_LT_QH:
        case OPC_CMP_LE_QH:
        case OPC_CMP_EQ_PW:
        case OPC_CMP_LT_PW:
        case OPC_CMP_LE_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGDU_EQ_OB:
        case OPC_CMPGDU_LT_OB:
        case OPC_CMPGDU_LE_OB:
        case OPC_CMPGU_EQ_OB:
        case OPC_CMPGU_LT_OB:
        case OPC_CMPGU_LE_OB:
        case OPC_PACKRL_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default: /* Invalid */
            MIPS_INVAL("MASK CMPU_EQ.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAPPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_DEXTR_W_DSP:
        op2 = MASK_DEXTR_W(ctx->opcode);
        case OPC_DEXTR_R_L:
        case OPC_DEXTR_RS_L:
        case OPC_DEXTR_R_W:
        case OPC_DEXTR_RS_W:
        case OPC_DEXTR_S_H:
        case OPC_DEXTRV_R_L:
        case OPC_DEXTRV_RS_L:
        case OPC_DEXTRV_S_H:
        case OPC_DEXTRV_R_W:
        case OPC_DEXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DPAQ_W_QH_DSP:
        op2 = MASK_DPAQ_W_QH(ctx->opcode);
        case OPC_DPAU_H_OBL:
        case OPC_DPAU_H_OBR:
        case OPC_DPSU_H_OBL:
        case OPC_DPSU_H_OBR:
        case OPC_DPAQ_S_W_QH:
        case OPC_DPSQ_S_W_QH:
        case OPC_MULSAQ_S_W_QH:
        case OPC_DPAQ_SA_L_PW:
        case OPC_DPSQ_SA_L_PW:
        case OPC_MULSAQ_S_L_PW:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_MAQ_S_W_QHLL:
        case OPC_MAQ_S_W_QHLR:
        case OPC_MAQ_S_W_QHRL:
        case OPC_MAQ_S_W_QHRR:
        case OPC_MAQ_SA_W_QHLL:
        case OPC_MAQ_SA_W_QHLR:
        case OPC_MAQ_SA_W_QHRL:
        case OPC_MAQ_SA_W_QHRR:
        case OPC_MAQ_S_L_PWL:
        case OPC_MAQ_S_L_PWR:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default: /* Invalid */
            MIPS_INVAL("MASK DPAQ.W.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DINSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);

            gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);
        default: /* Invalid */
            MIPS_INVAL("MASK DINSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_OB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special3_legacy");
        generate_exception_end(ctx, EXCP_RI);
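/*
 * Top-level SPECIAL3 decoder: EVA loads/stores, the bit-field ops, RDHWR
 * and the MT FORK/YIELD instructions are handled here; everything else is
 * passed on to the R6 or legacy SPECIAL3 decoder.
 */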
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = sextract32(ctx->opcode, 7, 9);

    op1 = MASK_SPECIAL3(ctx->opcode);
    /*
     * EVA loads and stores overlap Loongson 2E instructions decoded by
     * decode_opc_special3_legacy(), so be careful to allow their decoding when
     * EVA is absent.
     */
    case OPC_LWLE ... OPC_LWRE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LBUE ... OPC_LHUE:
    case OPC_LBE ... OPC_LWE:
        check_cp0_enabled(ctx);
        gen_ld(ctx, op1, rt, rs, imm);
    case OPC_SWLE ... OPC_SWRE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SBE ... OPC_SHE:
        check_cp0_enabled(ctx);
        gen_st(ctx, op1, rt, rs, imm);
        check_cp0_enabled(ctx);
        gen_st_cond(ctx, op1, rt, rs, imm);
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_cp0_enabled(ctx);
        /* Treat as NOP. */
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN ... OPC_ALIGN_END:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN ... OPC_DALIGN_END:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        check_insn(ctx, ASE_MT);
        TCGv t0 = tcg_temp_new();
        TCGv t1 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        gen_helper_fork(t0, t1);
        check_insn(ctx, ASE_MT);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rs);
        gen_helper_yield(t0, cpu_env, t0);
        gen_store_gpr(t0, rd);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
            decode_opc_special3_legacy(env, ctx);
/* MIPS SIMD Architecture (MSA) */
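/*
 * The MSA decode helpers below mostly unpack the wd/ws/wt register fields
 * and the data format (df) from the opcode, wrap them in TCGv_i32 constants
 * and call the matching msa helper; the 128-bit vector registers themselves
 * are modelled as pairs of 64-bit halves (msa_wr_d).
 */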
static inline int check_msa_access(DisasContext *ctx)
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            generate_exception_end(ctx, EXCP_RI);
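/*
 * gen_check_zero_element() below uses the classic "has a zero element" bit
 * trick: for a vector half x, (x - 0x01..01) & ~x & 0x80..80 is non-zero
 * iff some element of x is zero; the per-format constants chosen from df
 * are the repeated 0x01 and 0x80 patterns for that element width.
 */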
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
    tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_or_i64(t0, msa_wr_d[wt << 1], msa_wr_d[(wt << 1) + 1]);
        tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                             TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
        tcg_gen_trunc_i64_tl(bcond, t0);
        tcg_temp_free_i64(t0);
        gen_check_zero_element(bcond, df, wt);
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);

    ctx->btarget = ctx->pc + (s16 << 2) + 4;
    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_I8(op)    (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
        uint8_t df = (ctx->opcode >> 24) & 0x3;
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            TCGv_i32 tdf = tcg_const_i32(df);
            gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
            tcg_temp_free_i32(tdf);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
static void gen_msa_i5(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_I5(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    int8_t s5 = (int8_t) sextract32(ctx->opcode, 16, 5);
    uint8_t u5 = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 timm = tcg_temp_new_i32();
    tcg_gen_movi_i32(timm, u5);

    switch (MASK_MSA_I5(ctx->opcode)) {
        gen_helper_msa_addvi_df(cpu_env, tdf, twd, tws, timm);
        gen_helper_msa_subvi_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MAXI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_maxi_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MAXI_U_df:
        gen_helper_msa_maxi_u_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MINI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_mini_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MINI_U_df:
        gen_helper_msa_mini_u_df(cpu_env, tdf, twd, tws, timm);
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_ceqi_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLTI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clti_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLTI_U_df:
        gen_helper_msa_clti_u_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLEI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clei_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLEI_U_df:
        gen_helper_msa_clei_u_df(cpu_env, tdf, twd, tws, timm);
        int32_t s10 = sextract32(ctx->opcode, 11, 10);
        tcg_gen_movi_i32(timm, s10);
        gen_helper_msa_ldi_df(cpu_env, tdf, twd, timm);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(timm);
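/*
 * For the BIT group the combined df/m field encodes both the data format
 * and the bit index: the number of leading 1 bits before the first 0 in dfm
 * selects the format (the tests on 0x40/0x60/0x70/0x78 below) and the
 * remaining low bits give m.
 */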
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_BIT(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    if ((dfm & 0x40) == 0x00) {
    } else if ((dfm & 0x60) == 0x40) {
    } else if ((dfm & 0x70) == 0x60) {
    } else if ((dfm & 0x78) == 0x70) {
        generate_exception_end(ctx, EXCP_RI);

    tdf = tcg_const_i32(df);
    tm = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
static void gen_msa_3r(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_3R(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_3R(ctx->opcode)) {
        gen_helper_msa_sll_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_addv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ceq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_add_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBS_S_df:
        gen_helper_msa_subs_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mulv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_sld_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_vshf_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_sra_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_subv_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_A_df:
        gen_helper_msa_adds_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBS_U_df:
        gen_helper_msa_subs_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_maddv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_splat_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srar_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_clt_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_S_df:
        gen_helper_msa_adds_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBSUS_U_df:
        gen_helper_msa_subsus_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_msubv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_pckev_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srlr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bclr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_clt_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_U_df:
        gen_helper_msa_adds_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBSUU_S_df:
        gen_helper_msa_subsuu_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_pckod_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bset_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_cle_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ave_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ASUB_S_df:
        gen_helper_msa_asub_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_div_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bneg_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_cle_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ave_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ASUB_U_df:
        gen_helper_msa_asub_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_div_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_binsl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_AVER_S_df:
        gen_helper_msa_aver_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mod_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvev_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_binsr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_AVER_U_df:
        gen_helper_msa_aver_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mod_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvod_df(cpu_env, tdf, twd, tws, twt);
    case OPC_DOTP_S_df:
    case OPC_DOTP_U_df:
    case OPC_DPADD_S_df:
    case OPC_DPADD_U_df:
    case OPC_DPSUB_S_df:
    case OPC_HADD_S_df:
    case OPC_DPSUB_U_df:
    case OPC_HADD_U_df:
    case OPC_HSUB_S_df:
    case OPC_HSUB_U_df:
        if (df == DF_BYTE) {
            generate_exception_end(ctx, EXCP_RI);
        switch (MASK_MSA_3R(ctx->opcode)) {
        case OPC_DOTP_S_df:
            gen_helper_msa_dotp_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DOTP_U_df:
            gen_helper_msa_dotp_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPADD_S_df:
            gen_helper_msa_dpadd_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPADD_U_df:
            gen_helper_msa_dpadd_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPSUB_S_df:
            gen_helper_msa_dpsub_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HADD_S_df:
            gen_helper_msa_hadd_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPSUB_U_df:
            gen_helper_msa_dpsub_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HADD_U_df:
            gen_helper_msa_hadd_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HSUB_S_df:
            gen_helper_msa_hsub_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HSUB_U_df:
            gen_helper_msa_hsub_u_df(cpu_env, tdf, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
static void gen_msa_elm_df(CPUMIPSState *env, DisasContext *ctx, uint32_t df,
#define MASK_MSA_ELM(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tn  = tcg_const_i32(n);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_ELM(ctx->opcode)) {
        gen_helper_msa_sldi_df(cpu_env, tdf, twd, tws, tn);
    case OPC_SPLATI_df:
        gen_helper_msa_splati_df(cpu_env, tdf, twd, tws, tn);
        gen_helper_msa_insve_df(cpu_env, tdf, twd, tws, tn);
    case OPC_COPY_S_df:
    case OPC_COPY_U_df:
    case OPC_INSERT_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
        switch (MASK_MSA_ELM(ctx->opcode)) {
        case OPC_COPY_S_df:
            if (likely(wd != 0)) {
                gen_helper_msa_copy_s_df(cpu_env, tdf, twd, tws, tn);
        case OPC_COPY_U_df:
            if (likely(wd != 0)) {
                gen_helper_msa_copy_u_df(cpu_env, tdf, twd, tws, tn);
        case OPC_INSERT_df:
            gen_helper_msa_insert_df(cpu_env, tdf, twd, tws, tn);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(tn);
    tcg_temp_free_i32(tdf);
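/*
 * ELM instructions encode the data format and the element index in a
 * combined dfn field, decoded below much like dfm for the BIT group; the
 * special value 0x3E selects CTCMSA/CFCMSA/MOVE.V, handled by
 * gen_msa_elm_3e().
 */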
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
    } else if ((dfn & 0x38) == 0x20) {
    } else if ((dfn & 0x3c) == 0x30) {
    } else if ((dfn & 0x3e) == 0x38) {
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        generate_exception_end(ctx, EXCP_RI);

    gen_msa_elm_df(env, ctx, df, n);
19082 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
#define MASK_MSA_3RF(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t df = (ctx->opcode >> 21) & 0x1;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_temp_new_i32();

    /* adjust df value for floating-point instruction */
    tcg_gen_movi_i32(tdf, df + 2);

    switch (MASK_MSA_3RF(ctx->opcode)) {
    case OPC_FCAF_df:
        gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FADD_df:
        gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUN_df:
        gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUB_df:
        gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCOR_df:
        gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCEQ_df:
        gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMUL_df:
        gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUNE_df:
        gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUEQ_df:
        gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FDIV_df:
        gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCNE_df:
        gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLT_df:
        gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMADD_df:
        gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MUL_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULT_df:
        gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMSUB_df:
        gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADD_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLE_df:
        gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUB_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULE_df:
        gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXP2_df:
        gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSAF_df:
        gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXDO_df:
        gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUN_df:
        gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSOR_df:
        gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSEQ_df:
        gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FTQ_df:
        gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUNE_df:
        gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUEQ_df:
        gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSNE_df:
        gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLT_df:
        gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_df:
        gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MULR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULT_df:
        gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_A_df:
        gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADDR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLE_df:
        gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_df:
        gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUBR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULE_df:
        gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_A_df:
        gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2R(op)     (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
    case OPC_FILL_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        break;
    case OPC_PCNT_df:
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLOC_df:
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLZC_df:
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2RF(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FSQRT_df:
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRCP_df:
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRINT_df:
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FLOG2_df:
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQL_df:
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQR_df:
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
{
    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
    case OPC_OR_V:
    case OPC_NOR_V:
    case OPC_XOR_V:
    case OPC_BMNZ_V:
    case OPC_BMZ_V:
    case OPC_BSEL_V:
        gen_msa_vec_v(env, ctx);
        break;
    case OPC_MSA_2R:
        gen_msa_2r(env, ctx);
        break;
    case OPC_MSA_2RF:
        gen_msa_2rf(env, ctx);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
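
/* Decode one 32-bit MIPS instruction at ctx->pc and emit TCG code for it. */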
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
        decode_opc_special2_legacy(env, ctx);
        break;
    case OPC_SPECIAL3:
        decode_opc_special3(env, ctx);
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        case OPC_TNEI:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
            break;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default:            /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                uint32_t op2;
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode.  */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* BS_STOP isn't sufficient, we need to ensure we break out
                       of translated code to check for pending interrupts.  */
                    gen_save_pc(ctx->pc + 4);
                    ctx->bstate = BS_EXCP;
                    break;
                default:            /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            MIPS_INVAL("cp0");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    /* Branch */
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;

        /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;

    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_cop1x(ctx);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_cop1x(ctx);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_PS_FMT:
            check_ps(ctx);
            /* fall through */
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;

    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;

    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;

#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
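
/*
 * Translate a block of guest code: decode instructions starting at tb->pc
 * until the translation block must end (branch, page boundary, instruction
 * budget or single-stepping).
 */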
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx.sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = hflags_mmu_index(ctx.hflags);
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order to for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }

        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }

        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb_cflags(tb) & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    LOG_DISAS("\n");
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
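
/* Dump the FPU control and data registers for the CPU state log. */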
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }

    cpu_fprintf(f, "CP0 Status  0x%08x Cause   0x%08x EPC    0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, "    Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, "    Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, "    Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
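
/*
 * Allocate the TCG globals that mirror the architectural state (GPRs,
 * FPU/MSA registers, PC, HI/LO, DSPControl, branch condition/target and
 * hflags).
 */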
void mips_tcg_init(void)
{
    int i;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
}
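
/*
 * The CPU model table and the mmu/fpu/mvp init helpers used by the
 * functions below are provided by translate_init.c, which is compiled
 * as part of this translation unit.
 */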
#include "translate_init.c"

void cpu_mips_realize_env(CPUMIPSState *env)
{
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}

bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}

bool cpu_supports_isa(const char *cpu_model, unsigned int isa)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->insn_flags & isa) != 0;
}

void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
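
/* Reset the CPU to the architectural power-on state of the selected CPU model. */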
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode.  */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= (int32_t)0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 =   (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
    env->CP0_SegCtl1 =   (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (2 << CP0SC_C);
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 =   (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
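
/*
 * Restore PC, branch-state hflags and the branch target from the values
 * recorded by tcg_gen_insn_start() at translation time.
 */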
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}