/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"
#include "target/mips/trace.h"
#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
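/*
 * Decoding sketch: the translator decodes an instruction word by masking
 * fixed bit fields and comparing the result against the enumerators below.
 * The major opcode sits in bits 31..26, so e.g. the word 0x24420001
 * (addiu v0, v0, 1) gives MASK_OP_MAJOR(0x24420001) == (0x09 << 26), i.e.
 * OPC_ADDIU.  The secondary tables (SPECIAL, REGIMM, CP0, ...) then refine
 * the match with additional masks on the function, rt or sa fields.
 */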
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)  MASK_OP_MAJOR(op) | (op & 0x3F)
enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)  (MASK_SPECIAL(op) | (op & (0x7ff)))
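/*
 * Illustration: the R6 variants reuse the legacy opcode/function encoding and
 * are distinguished only by the sa/shamt area (bits 6..10), which must be
 * zero in the legacy forms.  E.g. R6_OPC_MUL below is OPC_MULT with 2 in that
 * field and R6_OPC_MUH is OPC_MULT with 3, so MASK_R6_MULDIV() keeps those
 * bits when matching.
 */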
enum {
    R6_OPC_MUL   = OPC_MULT | (2 << 6),
    R6_OPC_MUH   = OPC_MULT | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV | (2 << 6),
    R6_OPC_MOD   = OPC_DIV | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL  = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH  = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV  = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD  = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ   = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO   = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ  = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO  = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA      = 0x05 | OPC_SPECIAL,
    OPC_DLSA     = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)  MASK_SPECIAL(op) | (op & (0x1F << 6))
enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)  MASK_OP_MAJOR(op) | (op & (0x1F << 16))
enum {
    OPC_BLTZ    = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL   = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ    = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL   = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL  = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL  = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI    = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU   = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI    = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU   = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI    = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI    = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE  = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI   = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI    = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI    = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)
enum {
    /* Multiply & xxx operations */
    OPC_MADD        = 0x00 | OPC_SPECIAL2,
    OPC_MADDU       = 0x01 | OPC_SPECIAL2,
    OPC_MUL         = 0x02 | OPC_SPECIAL2,
    OPC_MSUB        = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU       = 0x05 | OPC_SPECIAL2,
    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ         = 0x20 | OPC_SPECIAL2,
    OPC_CLO         = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ        = 0x24 | OPC_SPECIAL2,
    OPC_DCLO        = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP       = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)
enum {
    OPC_EXT         = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM       = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU       = 0x02 | OPC_SPECIAL3,
    OPC_DEXT        = 0x03 | OPC_SPECIAL3,
    OPC_INS         = 0x04 | OPC_SPECIAL3,
    OPC_DINSM       = 0x05 | OPC_SPECIAL3,
    OPC_DINSU       = 0x06 | OPC_SPECIAL3,
    OPC_DINS        = 0x07 | OPC_SPECIAL3,
    OPC_FORK        = 0x08 | OPC_SPECIAL3,
    OPC_YIELD       = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL       = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL      = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR       = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    OPC_LWLE   = 0x19 | OPC_SPECIAL3,
    OPC_LWRE   = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE = 0x1B | OPC_SPECIAL3,
    OPC_SBE    = 0x1C | OPC_SPECIAL3,
    OPC_SHE    = 0x1D | OPC_SPECIAL3,
    OPC_SCE    = 0x1E | OPC_SPECIAL3,
    OPC_SWE    = 0x1F | OPC_SPECIAL3,
    OPC_SWLE   = 0x21 | OPC_SPECIAL3,
    OPC_SWRE   = 0x22 | OPC_SPECIAL3,
    OPC_PREFE  = 0x23 | OPC_SPECIAL3,
    OPC_LBUE   = 0x28 | OPC_SPECIAL3,
    OPC_LHUE   = 0x29 | OPC_SPECIAL3,
    OPC_LBE    = 0x2C | OPC_SPECIAL3,
    OPC_LHE    = 0x2D | OPC_SPECIAL3,
    OPC_LLE    = 0x2E | OPC_SPECIAL3,
    OPC_LWE    = 0x2F | OPC_SPECIAL3,

    R6_OPC_PREF  = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL    = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC    = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD   = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD   = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)  MASK_SPECIAL3(op) | (op & (0x1F << 6))
enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)  MASK_SPECIAL3(op) | (op & (0x1F << 6))
enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)  MASK_OP_MAJOR(op) | (op & (0x1F << 21))
enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)  MASK_CP0(op) | (op & 0xFFFF)
enum {
    OPC_DMT  = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT  = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI   = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI   = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP  = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP  = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)  MASK_CP0(op) | (op & 0x3F)
enum {
    OPC_TLBR    = 0x01 | OPC_C0,
    OPC_TLBWI   = 0x02 | OPC_C0,
    OPC_TLBINV  = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR   = 0x06 | OPC_C0,
    OPC_TLBP    = 0x08 | OPC_C0,
    OPC_RFE     = 0x10 | OPC_C0,
    OPC_ERET    = 0x18 | OPC_C0,
    OPC_DERET   = 0x1F | OPC_C0,
    OPC_WAIT    = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)  MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1    = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1   = (0x01 << 21) | OPC_CP1,
    OPC_CFC1    = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1   = (0x03 << 21) | OPC_CP1,
    OPC_MTC1    = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1   = (0x05 << 21) | OPC_CP1,
    OPC_CTC1    = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1   = (0x07 << 21) | OPC_CP1,
    OPC_BC1     = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V    = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V   = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT   = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT   = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT   = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT   = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT   = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT   = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT  = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ  = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ  = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B    = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H    = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W    = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D    = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B   = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H   = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W   = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D   = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)  MASK_CP1(op) | (op & (0x3 << 16))
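/*
 * Note: within the BC1 group, bit 16 selects branch-on-true vs. branch-on-false
 * and bit 17 selects the "likely" (delay-slot-nullifying) form, which is why
 * MASK_BC1() keeps exactly those two bits when matching the variants below.
 */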
enum {
    OPC_BC1F  = (0x00 << 16) | OPC_BC1,
    OPC_BC1T  = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)  MASK_OP_MAJOR(op) | (op & (0x1F << 21))
enum {
    OPC_MFC2   = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2  = (0x01 << 21) | OPC_CP2,
    OPC_CFC2   = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2  = (0x03 << 21) | OPC_CP2,
    OPC_MTC2   = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2  = (0x05 << 21) | OPC_CP2,
    OPC_CTC2   = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2  = (0x07 << 21) | OPC_CP2,
    OPC_BC2    = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)
enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00  = 0x00 | OPC_MSA,
    OPC_MSA_I8_01  = 0x01 | OPC_MSA,
    OPC_MSA_I8_02  = 0x02 | OPC_MSA,
    OPC_MSA_I5_06  = 0x06 | OPC_MSA,
    OPC_MSA_I5_07  = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D  = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E  = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F  = 0x0F | OPC_MSA,
    OPC_MSA_3R_10  = 0x10 | OPC_MSA,
    OPC_MSA_3R_11  = 0x11 | OPC_MSA,
    OPC_MSA_3R_12  = 0x12 | OPC_MSA,
    OPC_MSA_3R_13  = 0x13 | OPC_MSA,
    OPC_MSA_3R_14  = 0x14 | OPC_MSA,
    OPC_MSA_3R_15  = 0x15 | OPC_MSA,
    OPC_MSA_ELM    = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC    = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};
enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df  = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df   = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df  = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df    = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B  = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B   = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B   = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B  = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H   = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B  = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W   = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B  = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V  = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V   = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V  = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V  = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V  = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R  = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df   = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA    = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA    = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V    = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df  = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df    = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df    = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df    = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df    = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df    = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df    = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df    = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df   = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df   = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df    = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df    = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df    = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df   = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df   = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df   = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df   = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df  = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df    = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df  = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df   = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df   = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df    = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df   = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df    = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df    = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df    = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df     = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df   = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df   = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df    = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df    = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df    = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df  = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df   = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df  = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df    = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df    = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df   = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df  = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df   = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df  = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df   = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df  = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df   = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df  = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df  = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df  = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df  = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df  = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
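/*
 * Usage sketch (with a placeholder helper name "foo"): these wrappers box a
 * constant argument into a temporary TCGv_i32 so that a helper taking an
 * immediate can be invoked and the temporary freed in a single statement:
 *
 *     gen_helper_0e1i(foo, t0, imm);
 *
 * expands to roughly: create a tcg_const_i32(imm) temporary, call
 * gen_helper_foo(cpu_env, t0, tmp), then tcg_temp_free_i32(tmp).
 */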
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                      exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
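/*
 * Register 0 ($zero) is hard-wired to zero: loads from it materialise an
 * immediate 0 instead of reading cpu_gpr[0], and stores to it are dropped.
 */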
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
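/*
 * The two helpers above implement the RDPGPR/WRPGPR style of access to the
 * previous shadow register set: the set selected by CP0.SRSCtl.PSS is turned
 * into a byte offset from the CPU state pointer (32 target_ulong registers
 * per set), and the requested GPR slot within that set is then loaded or
 * stored.
 */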
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
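/*
 * The translator tracks PC and hflags in the DisasContext and only writes
 * them back to the TCG globals when needed (typically before anything that
 * can raise an exception).  save_cpu_state() below performs that lazy
 * synchronisation, including the branch target when a branch is pending.
 */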
static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
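/*
 * generate_exception_err() synchronises the CPU state, calls the exception
 * helper and marks the block as ending in an exception (BS_EXCP), while
 * generate_exception() only emits the raise_exception helper call and leaves
 * the state handling to its caller.  generate_exception_end() is the common
 * "raise and finish the instruction" wrapper with an error code of 0.
 */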
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
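/*
 * With Status.FR = 1 (MIPS_HFLAG_F64) every FPR is a full 64-bit register,
 * so 64-bit accesses go straight to fpu_f64[reg].  With FR = 0 a 64-bit
 * value is split across an even/odd register pair, which is why the helpers
 * above deposit the low and high halves into fpu_f64[reg & ~1] and
 * fpu_f64[reg | 1] respectively.
 */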
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled.  */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);  break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);  break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
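/*
 * These instantiations generate gen_cmp_d/gen_cmpabs_d, gen_cmp_s/
 * gen_cmpabs_s and gen_cmp_ps/gen_cmpabs_ps, i.e. the pre-R6 c.<cond>.fmt
 * comparisons (plain and absolute-value forms) dispatching on the 4-bit
 * condition field n and writing FPU condition code cc.
 */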
1958 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1959 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1960 int ft, int fs, int fd) \
1962 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1963 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1964 if (ifmt == FMT_D) { \
1965 check_cp1_registers(ctx, fs | ft | fd); \
1967 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1968 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1971 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1974 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1977 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1980 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1983 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1986 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1989 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1992 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1995 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1998 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2001 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2004 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2007 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2010 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2013 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2016 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2019 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2022 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2025 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2028 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2031 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2034 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2040 tcg_temp_free_i ## bits (fp0); \
2041 tcg_temp_free_i ## bits (fp1); \
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                              \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,  \
                                DisasContext *ctx)                          \
{                                                                           \
    TCGv t0 = tcg_temp_new();                                               \
    TCGLabel *l1 = gen_new_label();                                         \
    TCGLabel *l2 = gen_new_label();                                         \
                                                                            \
    tcg_gen_andi_tl(t0, arg2, almask);                                      \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                             \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));     \
    generate_exception(ctx, EXCP_AdES);                                     \
    gen_set_label(l1);                                                      \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));             \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                           \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                       \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));              \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));         \
    generate_exception_end(ctx, EXCP_SC);                                   \
    gen_set_label(l2);                                                      \
    tcg_gen_movi_tl(t0, 0);                                                 \
    gen_store_gpr(t0, rt);                                                  \
    tcg_temp_free(t0);                                                      \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                              \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx,  \
                                DisasContext *ctx)                          \
{                                                                           \
    TCGv t0 = tcg_temp_new();                                               \
    gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx);                         \
    gen_store_gpr(t0, rt);                                                  \
    tcg_temp_free(t0);                                                      \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
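/*
 * gen_base_offset_addr() below forms the usual base+offset effective
 * address: base register 0 yields just the sign-extended offset, a zero
 * offset yields the base register itself, and otherwise the two are added
 * via gen_op_addr_add() so that 32-bit address wrapping is honoured on
 * 64-bit cores.
 */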
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
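/*
 * pc_relative_pc() returns the base address used for PC-relative loads:
 * ctx->pc, moved back over the branch (2 bytes for a 16-bit branch, 4
 * otherwise) when the load sits in a branch delay slot, and then
 * word-aligned.
 */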
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
2147 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2148 int rt
, int base
, int16_t offset
)
2151 int mem_idx
= ctx
->mem_idx
;
2153 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2154 /* Loongson CPU uses a load to zero register for prefetch.
2155 We emulate it as a NOP. On other CPU we must perform the
2156 actual memory access. */
2160 t0
= tcg_temp_new();
2161 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2164 #if defined(TARGET_MIPS64)
2166 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2167 ctx
->default_tcg_memop_mask
);
2168 gen_store_gpr(t0
, rt
);
2171 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2172 ctx
->default_tcg_memop_mask
);
2173 gen_store_gpr(t0
, rt
);
2177 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2178 gen_store_gpr(t0
, rt
);
2181 t1
= tcg_temp_new();
2182 /* Do a byte access to possibly trigger a page
2183 fault with the unaligned address. */
2184 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2185 tcg_gen_andi_tl(t1
, t0
, 7);
2186 #ifndef TARGET_WORDS_BIGENDIAN
2187 tcg_gen_xori_tl(t1
, t1
, 7);
2189 tcg_gen_shli_tl(t1
, t1
, 3);
2190 tcg_gen_andi_tl(t0
, t0
, ~7);
2191 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2192 tcg_gen_shl_tl(t0
, t0
, t1
);
2193 t2
= tcg_const_tl(-1);
2194 tcg_gen_shl_tl(t2
, t2
, t1
);
2195 gen_load_gpr(t1
, rt
);
2196 tcg_gen_andc_tl(t1
, t1
, t2
);
2198 tcg_gen_or_tl(t0
, t0
, t1
);
2200 gen_store_gpr(t0
, rt
);
2203 t1
= tcg_temp_new();
2204 /* Do a byte access to possibly trigger a page
2205 fault with the unaligned address. */
2206 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2207 tcg_gen_andi_tl(t1
, t0
, 7);
2208 #ifdef TARGET_WORDS_BIGENDIAN
2209 tcg_gen_xori_tl(t1
, t1
, 7);
2211 tcg_gen_shli_tl(t1
, t1
, 3);
2212 tcg_gen_andi_tl(t0
, t0
, ~7);
2213 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2214 tcg_gen_shr_tl(t0
, t0
, t1
);
2215 tcg_gen_xori_tl(t1
, t1
, 63);
2216 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2217 tcg_gen_shl_tl(t2
, t2
, t1
);
2218 gen_load_gpr(t1
, rt
);
2219 tcg_gen_and_tl(t1
, t1
, t2
);
2221 tcg_gen_or_tl(t0
, t0
, t1
);
2223 gen_store_gpr(t0
, rt
);
2226 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2227 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2229 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2230 gen_store_gpr(t0
, rt
);
2234 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2235 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2237 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2238 gen_store_gpr(t0
, rt
);
2241 mem_idx
= MIPS_HFLAG_UM
;
2244 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2245 ctx
->default_tcg_memop_mask
);
2246 gen_store_gpr(t0
, rt
);
2249 mem_idx
= MIPS_HFLAG_UM
;
2252 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2253 ctx
->default_tcg_memop_mask
);
2254 gen_store_gpr(t0
, rt
);
2257 mem_idx
= MIPS_HFLAG_UM
;
2260 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2261 ctx
->default_tcg_memop_mask
);
2262 gen_store_gpr(t0
, rt
);
2265 mem_idx
= MIPS_HFLAG_UM
;
2268 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2269 gen_store_gpr(t0
, rt
);
2272 mem_idx
= MIPS_HFLAG_UM
;
2275 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2276 gen_store_gpr(t0
, rt
);
2279 mem_idx
= MIPS_HFLAG_UM
;
2282 t1
= tcg_temp_new();
2283 /* Do a byte access to possibly trigger a page
2284 fault with the unaligned address. */
2285 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2286 tcg_gen_andi_tl(t1
, t0
, 3);
2287 #ifndef TARGET_WORDS_BIGENDIAN
2288 tcg_gen_xori_tl(t1
, t1
, 3);
2290 tcg_gen_shli_tl(t1
, t1
, 3);
2291 tcg_gen_andi_tl(t0
, t0
, ~3);
2292 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2293 tcg_gen_shl_tl(t0
, t0
, t1
);
2294 t2
= tcg_const_tl(-1);
2295 tcg_gen_shl_tl(t2
, t2
, t1
);
2296 gen_load_gpr(t1
, rt
);
2297 tcg_gen_andc_tl(t1
, t1
, t2
);
2299 tcg_gen_or_tl(t0
, t0
, t1
);
2301 tcg_gen_ext32s_tl(t0
, t0
);
2302 gen_store_gpr(t0
, rt
);
2305 mem_idx
= MIPS_HFLAG_UM
;
2308 t1
= tcg_temp_new();
2309 /* Do a byte access to possibly trigger a page
2310 fault with the unaligned address. */
2311 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2312 tcg_gen_andi_tl(t1
, t0
, 3);
2313 #ifdef TARGET_WORDS_BIGENDIAN
2314 tcg_gen_xori_tl(t1
, t1
, 3);
2316 tcg_gen_shli_tl(t1
, t1
, 3);
2317 tcg_gen_andi_tl(t0
, t0
, ~3);
2318 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2319 tcg_gen_shr_tl(t0
, t0
, t1
);
2320 tcg_gen_xori_tl(t1
, t1
, 31);
2321 t2
= tcg_const_tl(0xfffffffeull
);
2322 tcg_gen_shl_tl(t2
, t2
, t1
);
2323 gen_load_gpr(t1
, rt
);
2324 tcg_gen_and_tl(t1
, t1
, t2
);
2326 tcg_gen_or_tl(t0
, t0
, t1
);
2328 tcg_gen_ext32s_tl(t0
, t0
);
2329 gen_store_gpr(t0
, rt
);
2332 mem_idx
= MIPS_HFLAG_UM
;
2336 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2337 gen_store_gpr(t0
, rt
);
2344 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2345 int base
, int16_t offset
)
2347 TCGv t0
= tcg_temp_new();
2348 TCGv t1
= tcg_temp_new();
2349 int mem_idx
= ctx
->mem_idx
;
2351 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2352 gen_load_gpr(t1
, rt
);
2354 #if defined(TARGET_MIPS64)
2356 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2357 ctx
->default_tcg_memop_mask
);
2360 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2363 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2367 mem_idx
= MIPS_HFLAG_UM
;
2370 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2371 ctx
->default_tcg_memop_mask
);
2374 mem_idx
= MIPS_HFLAG_UM
;
2377 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2378 ctx
->default_tcg_memop_mask
);
2381 mem_idx
= MIPS_HFLAG_UM
;
2384 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2387 mem_idx
= MIPS_HFLAG_UM
;
2390 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2393 mem_idx
= MIPS_HFLAG_UM
;
2396 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2404 /* Store conditional */
2405 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2406 int base
, int16_t offset
)
2409 int mem_idx
= ctx
->mem_idx
;
2411 #ifdef CONFIG_USER_ONLY
2412 t0
= tcg_temp_local_new();
2413 t1
= tcg_temp_local_new();
2415 t0
= tcg_temp_new();
2416 t1
= tcg_temp_new();
2418 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2419 gen_load_gpr(t1
, rt
);
2421 #if defined(TARGET_MIPS64)
2424 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2428 mem_idx
= MIPS_HFLAG_UM
;
2432 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2439 /* Load and store */
2440 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2441 int base
, int16_t offset
)
2443 TCGv t0
= tcg_temp_new();
2445 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2446 /* Don't do NOP if destination is zero: we must perform the actual
2451 TCGv_i32 fp0
= tcg_temp_new_i32();
2452 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2453 ctx
->default_tcg_memop_mask
);
2454 gen_store_fpr32(ctx
, fp0
, ft
);
2455 tcg_temp_free_i32(fp0
);
2460 TCGv_i32 fp0
= tcg_temp_new_i32();
2461 gen_load_fpr32(ctx
, fp0
, ft
);
2462 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2463 ctx
->default_tcg_memop_mask
);
2464 tcg_temp_free_i32(fp0
);
2469 TCGv_i64 fp0
= tcg_temp_new_i64();
2470 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2471 ctx
->default_tcg_memop_mask
);
2472 gen_store_fpr64(ctx
, fp0
, ft
);
2473 tcg_temp_free_i64(fp0
);
2478 TCGv_i64 fp0
= tcg_temp_new_i64();
2479 gen_load_fpr64(ctx
, fp0
, ft
);
2480 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2481 ctx
->default_tcg_memop_mask
);
2482 tcg_temp_free_i64(fp0
);
2486 MIPS_INVAL("flt_ldst");
2487 generate_exception_end(ctx
, EXCP_RI
);
2494 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2495 int rs
, int16_t imm
)
2497 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2498 check_cp1_enabled(ctx
);
2502 check_insn(ctx
, ISA_MIPS2
);
2505 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2508 generate_exception_err(ctx
, EXCP_CpU
, 1);
2512 /* Arithmetic with immediate operand */
2513 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2514 int rt
, int rs
, int16_t imm
)
2516 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2518 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2519 /* If no destination, treat it as a NOP.
2520 For addi, we must generate the overflow exception when needed. */
2526 TCGv t0
= tcg_temp_local_new();
2527 TCGv t1
= tcg_temp_new();
2528 TCGv t2
= tcg_temp_new();
2529 TCGLabel
*l1
= gen_new_label();
2531 gen_load_gpr(t1
, rs
);
2532 tcg_gen_addi_tl(t0
, t1
, uimm
);
2533 tcg_gen_ext32s_tl(t0
, t0
);
2535 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2536 tcg_gen_xori_tl(t2
, t0
, uimm
);
2537 tcg_gen_and_tl(t1
, t1
, t2
);
2539 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2541 /* operands of same sign, result different sign */
2542 generate_exception(ctx
, EXCP_OVERFLOW
);
2544 tcg_gen_ext32s_tl(t0
, t0
);
2545 gen_store_gpr(t0
, rt
);
2551 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2552 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2554 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2557 #if defined(TARGET_MIPS64)
2560 TCGv t0
= tcg_temp_local_new();
2561 TCGv t1
= tcg_temp_new();
2562 TCGv t2
= tcg_temp_new();
2563 TCGLabel
*l1
= gen_new_label();
2565 gen_load_gpr(t1
, rs
);
2566 tcg_gen_addi_tl(t0
, t1
, uimm
);
2568 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2569 tcg_gen_xori_tl(t2
, t0
, uimm
);
2570 tcg_gen_and_tl(t1
, t1
, t2
);
2572 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2574 /* operands of same sign, result different sign */
2575 generate_exception(ctx
, EXCP_OVERFLOW
);
2577 gen_store_gpr(t0
, rt
);
2583 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2585 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2592 /* Logic with immediate operand */
2593 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2594 int rt
, int rs
, int16_t imm
)
2599 /* If no destination, treat it as a NOP. */
2602 uimm
= (uint16_t)imm
;
2605 if (likely(rs
!= 0))
2606 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2608 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2612 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2614 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2617 if (likely(rs
!= 0))
2618 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2620 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2623 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2625 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2626 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2628 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2637 /* Set on less than with immediate operand */
2638 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2639 int rt
, int rs
, int16_t imm
)
2641 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2645 /* If no destination, treat it as a NOP. */
2648 t0
= tcg_temp_new();
2649 gen_load_gpr(t0
, rs
);
2652 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2655 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2661 /* Shifts with immediate operand */
2662 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2663 int rt
, int rs
, int16_t imm
)
2665 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2669 /* If no destination, treat it as a NOP. */
2673 t0
= tcg_temp_new();
2674 gen_load_gpr(t0
, rs
);
2677 tcg_gen_shli_tl(t0
, t0
, uimm
);
2678 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2681 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2685 tcg_gen_ext32u_tl(t0
, t0
);
2686 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2688 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2693 TCGv_i32 t1
= tcg_temp_new_i32();
2695 tcg_gen_trunc_tl_i32(t1
, t0
);
2696 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2697 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2698 tcg_temp_free_i32(t1
);
2700 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2703 #if defined(TARGET_MIPS64)
2705 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2708 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2711 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2715 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2717 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2721 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2724 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2727 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2730 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2738 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2739 int rd
, int rs
, int rt
)
2741 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2742 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2743 /* If no destination, treat it as a NOP.
2744 For add & sub, we must generate the overflow exception when needed. */
2751 TCGv t0
= tcg_temp_local_new();
2752 TCGv t1
= tcg_temp_new();
2753 TCGv t2
= tcg_temp_new();
2754 TCGLabel
*l1
= gen_new_label();
2756 gen_load_gpr(t1
, rs
);
2757 gen_load_gpr(t2
, rt
);
2758 tcg_gen_add_tl(t0
, t1
, t2
);
2759 tcg_gen_ext32s_tl(t0
, t0
);
2760 tcg_gen_xor_tl(t1
, t1
, t2
);
2761 tcg_gen_xor_tl(t2
, t0
, t2
);
2762 tcg_gen_andc_tl(t1
, t2
, t1
);
2764 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2766 /* operands of same sign, result different sign */
2767 generate_exception(ctx
, EXCP_OVERFLOW
);
2769 gen_store_gpr(t0
, rd
);
2774 if (rs
!= 0 && rt
!= 0) {
2775 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2776 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2777 } else if (rs
== 0 && rt
!= 0) {
2778 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2779 } else if (rs
!= 0 && rt
== 0) {
2780 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2782 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2787 TCGv t0
= tcg_temp_local_new();
2788 TCGv t1
= tcg_temp_new();
2789 TCGv t2
= tcg_temp_new();
2790 TCGLabel
*l1
= gen_new_label();
2792 gen_load_gpr(t1
, rs
);
2793 gen_load_gpr(t2
, rt
);
2794 tcg_gen_sub_tl(t0
, t1
, t2
);
2795 tcg_gen_ext32s_tl(t0
, t0
);
2796 tcg_gen_xor_tl(t2
, t1
, t2
);
2797 tcg_gen_xor_tl(t1
, t0
, t1
);
2798 tcg_gen_and_tl(t1
, t1
, t2
);
2800 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2802 /* operands of different sign, first operand and result different sign */
2803 generate_exception(ctx
, EXCP_OVERFLOW
);
2805 gen_store_gpr(t0
, rd
);
2810 if (rs
!= 0 && rt
!= 0) {
2811 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2812 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2813 } else if (rs
== 0 && rt
!= 0) {
2814 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2815 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2816 } else if (rs
!= 0 && rt
== 0) {
2817 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2819 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2822 #if defined(TARGET_MIPS64)
2825 TCGv t0
= tcg_temp_local_new();
2826 TCGv t1
= tcg_temp_new();
2827 TCGv t2
= tcg_temp_new();
2828 TCGLabel
*l1
= gen_new_label();
2830 gen_load_gpr(t1
, rs
);
2831 gen_load_gpr(t2
, rt
);
2832 tcg_gen_add_tl(t0
, t1
, t2
);
2833 tcg_gen_xor_tl(t1
, t1
, t2
);
2834 tcg_gen_xor_tl(t2
, t0
, t2
);
2835 tcg_gen_andc_tl(t1
, t2
, t1
);
2837 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2839 /* operands of same sign, result different sign */
2840 generate_exception(ctx
, EXCP_OVERFLOW
);
2842 gen_store_gpr(t0
, rd
);
2847 if (rs
!= 0 && rt
!= 0) {
2848 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2849 } else if (rs
== 0 && rt
!= 0) {
2850 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2851 } else if (rs
!= 0 && rt
== 0) {
2852 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2854 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2859 TCGv t0
= tcg_temp_local_new();
2860 TCGv t1
= tcg_temp_new();
2861 TCGv t2
= tcg_temp_new();
2862 TCGLabel
*l1
= gen_new_label();
2864 gen_load_gpr(t1
, rs
);
2865 gen_load_gpr(t2
, rt
);
2866 tcg_gen_sub_tl(t0
, t1
, t2
);
2867 tcg_gen_xor_tl(t2
, t1
, t2
);
2868 tcg_gen_xor_tl(t1
, t0
, t1
);
2869 tcg_gen_and_tl(t1
, t1
, t2
);
2871 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2873 /* operands of different sign, first operand and result different sign */
2874 generate_exception(ctx
, EXCP_OVERFLOW
);
2876 gen_store_gpr(t0
, rd
);
2881 if (rs
!= 0 && rt
!= 0) {
2882 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2883 } else if (rs
== 0 && rt
!= 0) {
2884 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2885 } else if (rs
!= 0 && rt
== 0) {
2886 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2888 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2893 if (likely(rs
!= 0 && rt
!= 0)) {
2894 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2895 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2897 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2903 /* Conditional move */
2904 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2905 int rd
, int rs
, int rt
)
2910 /* If no destination, treat it as a NOP. */
2914 t0
= tcg_temp_new();
2915 gen_load_gpr(t0
, rt
);
2916 t1
= tcg_const_tl(0);
2917 t2
= tcg_temp_new();
2918 gen_load_gpr(t2
, rs
);
2921 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2924 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2927 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2930 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2939 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2940 int rd
, int rs
, int rt
)
2943 /* If no destination, treat it as a NOP. */
2949 if (likely(rs
!= 0 && rt
!= 0)) {
2950 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2952 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2956 if (rs
!= 0 && rt
!= 0) {
2957 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2958 } else if (rs
== 0 && rt
!= 0) {
2959 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2960 } else if (rs
!= 0 && rt
== 0) {
2961 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2963 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2967 if (likely(rs
!= 0 && rt
!= 0)) {
2968 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2969 } else if (rs
== 0 && rt
!= 0) {
2970 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2971 } else if (rs
!= 0 && rt
== 0) {
2972 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2974 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2978 if (likely(rs
!= 0 && rt
!= 0)) {
2979 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2980 } else if (rs
== 0 && rt
!= 0) {
2981 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2982 } else if (rs
!= 0 && rt
== 0) {
2983 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2985 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2991 /* Set on lower than */
2992 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2993 int rd
, int rs
, int rt
)
2998 /* If no destination, treat it as a NOP. */
3002 t0
= tcg_temp_new();
3003 t1
= tcg_temp_new();
3004 gen_load_gpr(t0
, rs
);
3005 gen_load_gpr(t1
, rt
);
3008 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3011 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3019 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3020 int rd
, int rs
, int rt
)
3025 /* If no destination, treat it as a NOP.
3026 For add & sub, we must generate the overflow exception when needed. */
3030 t0
= tcg_temp_new();
3031 t1
= tcg_temp_new();
3032 gen_load_gpr(t0
, rs
);
3033 gen_load_gpr(t1
, rt
);
3036 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3037 tcg_gen_shl_tl(t0
, t1
, t0
);
3038 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3041 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3042 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3045 tcg_gen_ext32u_tl(t1
, t1
);
3046 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3047 tcg_gen_shr_tl(t0
, t1
, t0
);
3048 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3052 TCGv_i32 t2
= tcg_temp_new_i32();
3053 TCGv_i32 t3
= tcg_temp_new_i32();
3055 tcg_gen_trunc_tl_i32(t2
, t0
);
3056 tcg_gen_trunc_tl_i32(t3
, t1
);
3057 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3058 tcg_gen_rotr_i32(t2
, t3
, t2
);
3059 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3060 tcg_temp_free_i32(t2
);
3061 tcg_temp_free_i32(t3
);
3064 #if defined(TARGET_MIPS64)
3066 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3067 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3070 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3071 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3074 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3075 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3078 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3079 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3087 /* Arithmetic on HI/LO registers */
3088 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3090 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3101 #if defined(TARGET_MIPS64)
3103 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3107 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3111 #if defined(TARGET_MIPS64)
3113 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3117 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3122 #if defined(TARGET_MIPS64)
3124 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3128 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3131 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3136 #if defined(TARGET_MIPS64)
3138 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3142 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3145 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3151 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3154 TCGv t0
= tcg_const_tl(addr
);
3155 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3156 gen_store_gpr(t0
, reg
);
3160 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3166 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3169 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3170 addr
= addr_add(ctx
, pc
, offset
);
3171 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3175 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3176 addr
= addr_add(ctx
, pc
, offset
);
3177 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3179 #if defined(TARGET_MIPS64)
3182 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3183 addr
= addr_add(ctx
, pc
, offset
);
3184 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3188 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3191 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3192 addr
= addr_add(ctx
, pc
, offset
);
3193 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3198 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3199 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3200 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3203 #if defined(TARGET_MIPS64)
3204 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3205 case R6_OPC_LDPC
+ (1 << 16):
3206 case R6_OPC_LDPC
+ (2 << 16):
3207 case R6_OPC_LDPC
+ (3 << 16):
3209 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3210 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3211 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3215 MIPS_INVAL("OPC_PCREL");
3216 generate_exception_end(ctx
, EXCP_RI
);
3223 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3232 t0
= tcg_temp_new();
3233 t1
= tcg_temp_new();
3235 gen_load_gpr(t0
, rs
);
3236 gen_load_gpr(t1
, rt
);
3241 TCGv t2
= tcg_temp_new();
3242 TCGv t3
= tcg_temp_new();
3243 tcg_gen_ext32s_tl(t0
, t0
);
3244 tcg_gen_ext32s_tl(t1
, t1
);
3245 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3246 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3247 tcg_gen_and_tl(t2
, t2
, t3
);
3248 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3249 tcg_gen_or_tl(t2
, t2
, t3
);
3250 tcg_gen_movi_tl(t3
, 0);
3251 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3252 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3253 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3260 TCGv t2
= tcg_temp_new();
3261 TCGv t3
= tcg_temp_new();
3262 tcg_gen_ext32s_tl(t0
, t0
);
3263 tcg_gen_ext32s_tl(t1
, t1
);
3264 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3265 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3266 tcg_gen_and_tl(t2
, t2
, t3
);
3267 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3268 tcg_gen_or_tl(t2
, t2
, t3
);
3269 tcg_gen_movi_tl(t3
, 0);
3270 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3271 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3272 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3279 TCGv t2
= tcg_const_tl(0);
3280 TCGv t3
= tcg_const_tl(1);
3281 tcg_gen_ext32u_tl(t0
, t0
);
3282 tcg_gen_ext32u_tl(t1
, t1
);
3283 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3284 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3285 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3292 TCGv t2
= tcg_const_tl(0);
3293 TCGv t3
= tcg_const_tl(1);
3294 tcg_gen_ext32u_tl(t0
, t0
);
3295 tcg_gen_ext32u_tl(t1
, t1
);
3296 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3297 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3298 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3305 TCGv_i32 t2
= tcg_temp_new_i32();
3306 TCGv_i32 t3
= tcg_temp_new_i32();
3307 tcg_gen_trunc_tl_i32(t2
, t0
);
3308 tcg_gen_trunc_tl_i32(t3
, t1
);
3309 tcg_gen_mul_i32(t2
, t2
, t3
);
3310 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3311 tcg_temp_free_i32(t2
);
3312 tcg_temp_free_i32(t3
);
3317 TCGv_i32 t2
= tcg_temp_new_i32();
3318 TCGv_i32 t3
= tcg_temp_new_i32();
3319 tcg_gen_trunc_tl_i32(t2
, t0
);
3320 tcg_gen_trunc_tl_i32(t3
, t1
);
3321 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3322 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3323 tcg_temp_free_i32(t2
);
3324 tcg_temp_free_i32(t3
);
3329 TCGv_i32 t2
= tcg_temp_new_i32();
3330 TCGv_i32 t3
= tcg_temp_new_i32();
3331 tcg_gen_trunc_tl_i32(t2
, t0
);
3332 tcg_gen_trunc_tl_i32(t3
, t1
);
3333 tcg_gen_mul_i32(t2
, t2
, t3
);
3334 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3335 tcg_temp_free_i32(t2
);
3336 tcg_temp_free_i32(t3
);
3341 TCGv_i32 t2
= tcg_temp_new_i32();
3342 TCGv_i32 t3
= tcg_temp_new_i32();
3343 tcg_gen_trunc_tl_i32(t2
, t0
);
3344 tcg_gen_trunc_tl_i32(t3
, t1
);
3345 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3346 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3347 tcg_temp_free_i32(t2
);
3348 tcg_temp_free_i32(t3
);
3351 #if defined(TARGET_MIPS64)
3354 TCGv t2
= tcg_temp_new();
3355 TCGv t3
= tcg_temp_new();
3356 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3357 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3358 tcg_gen_and_tl(t2
, t2
, t3
);
3359 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3360 tcg_gen_or_tl(t2
, t2
, t3
);
3361 tcg_gen_movi_tl(t3
, 0);
3362 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3363 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3370 TCGv t2
= tcg_temp_new();
3371 TCGv t3
= tcg_temp_new();
3372 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3373 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3374 tcg_gen_and_tl(t2
, t2
, t3
);
3375 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3376 tcg_gen_or_tl(t2
, t2
, t3
);
3377 tcg_gen_movi_tl(t3
, 0);
3378 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3379 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3386 TCGv t2
= tcg_const_tl(0);
3387 TCGv t3
= tcg_const_tl(1);
3388 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3389 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3396 TCGv t2
= tcg_const_tl(0);
3397 TCGv t3
= tcg_const_tl(1);
3398 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3399 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3405 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3409 TCGv t2
= tcg_temp_new();
3410 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3415 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3419 TCGv t2
= tcg_temp_new();
3420 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3426 MIPS_INVAL("r6 mul/div");
3427 generate_exception_end(ctx
, EXCP_RI
);
3435 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3436 int acc
, int rs
, int rt
)
3440 t0
= tcg_temp_new();
3441 t1
= tcg_temp_new();
3443 gen_load_gpr(t0
, rs
);
3444 gen_load_gpr(t1
, rt
);
3453 TCGv t2
= tcg_temp_new();
3454 TCGv t3
= tcg_temp_new();
3455 tcg_gen_ext32s_tl(t0
, t0
);
3456 tcg_gen_ext32s_tl(t1
, t1
);
3457 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3458 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3459 tcg_gen_and_tl(t2
, t2
, t3
);
3460 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3461 tcg_gen_or_tl(t2
, t2
, t3
);
3462 tcg_gen_movi_tl(t3
, 0);
3463 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3464 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3465 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3466 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3467 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3474 TCGv t2
= tcg_const_tl(0);
3475 TCGv t3
= tcg_const_tl(1);
3476 tcg_gen_ext32u_tl(t0
, t0
);
3477 tcg_gen_ext32u_tl(t1
, t1
);
3478 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3479 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3480 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3481 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3482 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3489 TCGv_i32 t2
= tcg_temp_new_i32();
3490 TCGv_i32 t3
= tcg_temp_new_i32();
3491 tcg_gen_trunc_tl_i32(t2
, t0
);
3492 tcg_gen_trunc_tl_i32(t3
, t1
);
3493 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3494 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3495 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3496 tcg_temp_free_i32(t2
);
3497 tcg_temp_free_i32(t3
);
3502 TCGv_i32 t2
= tcg_temp_new_i32();
3503 TCGv_i32 t3
= tcg_temp_new_i32();
3504 tcg_gen_trunc_tl_i32(t2
, t0
);
3505 tcg_gen_trunc_tl_i32(t3
, t1
);
3506 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3507 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3508 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3509 tcg_temp_free_i32(t2
);
3510 tcg_temp_free_i32(t3
);
3513 #if defined(TARGET_MIPS64)
3516 TCGv t2
= tcg_temp_new();
3517 TCGv t3
= tcg_temp_new();
3518 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3519 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3520 tcg_gen_and_tl(t2
, t2
, t3
);
3521 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3522 tcg_gen_or_tl(t2
, t2
, t3
);
3523 tcg_gen_movi_tl(t3
, 0);
3524 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3525 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3526 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3533 TCGv t2
= tcg_const_tl(0);
3534 TCGv t3
= tcg_const_tl(1);
3535 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3536 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3537 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3543 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3546 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3551 TCGv_i64 t2
= tcg_temp_new_i64();
3552 TCGv_i64 t3
= tcg_temp_new_i64();
3554 tcg_gen_ext_tl_i64(t2
, t0
);
3555 tcg_gen_ext_tl_i64(t3
, t1
);
3556 tcg_gen_mul_i64(t2
, t2
, t3
);
3557 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3558 tcg_gen_add_i64(t2
, t2
, t3
);
3559 tcg_temp_free_i64(t3
);
3560 gen_move_low32(cpu_LO
[acc
], t2
);
3561 gen_move_high32(cpu_HI
[acc
], t2
);
3562 tcg_temp_free_i64(t2
);
3567 TCGv_i64 t2
= tcg_temp_new_i64();
3568 TCGv_i64 t3
= tcg_temp_new_i64();
3570 tcg_gen_ext32u_tl(t0
, t0
);
3571 tcg_gen_ext32u_tl(t1
, t1
);
3572 tcg_gen_extu_tl_i64(t2
, t0
);
3573 tcg_gen_extu_tl_i64(t3
, t1
);
3574 tcg_gen_mul_i64(t2
, t2
, t3
);
3575 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3576 tcg_gen_add_i64(t2
, t2
, t3
);
3577 tcg_temp_free_i64(t3
);
3578 gen_move_low32(cpu_LO
[acc
], t2
);
3579 gen_move_high32(cpu_HI
[acc
], t2
);
3580 tcg_temp_free_i64(t2
);
3585 TCGv_i64 t2
= tcg_temp_new_i64();
3586 TCGv_i64 t3
= tcg_temp_new_i64();
3588 tcg_gen_ext_tl_i64(t2
, t0
);
3589 tcg_gen_ext_tl_i64(t3
, t1
);
3590 tcg_gen_mul_i64(t2
, t2
, t3
);
3591 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3592 tcg_gen_sub_i64(t2
, t3
, t2
);
3593 tcg_temp_free_i64(t3
);
3594 gen_move_low32(cpu_LO
[acc
], t2
);
3595 gen_move_high32(cpu_HI
[acc
], t2
);
3596 tcg_temp_free_i64(t2
);
3601 TCGv_i64 t2
= tcg_temp_new_i64();
3602 TCGv_i64 t3
= tcg_temp_new_i64();
3604 tcg_gen_ext32u_tl(t0
, t0
);
3605 tcg_gen_ext32u_tl(t1
, t1
);
3606 tcg_gen_extu_tl_i64(t2
, t0
);
3607 tcg_gen_extu_tl_i64(t3
, t1
);
3608 tcg_gen_mul_i64(t2
, t2
, t3
);
3609 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3610 tcg_gen_sub_i64(t2
, t3
, t2
);
3611 tcg_temp_free_i64(t3
);
3612 gen_move_low32(cpu_LO
[acc
], t2
);
3613 gen_move_high32(cpu_HI
[acc
], t2
);
3614 tcg_temp_free_i64(t2
);
3618 MIPS_INVAL("mul/div");
3619 generate_exception_end(ctx
, EXCP_RI
);
3627 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3628 int rd
, int rs
, int rt
)
3630 TCGv t0
= tcg_temp_new();
3631 TCGv t1
= tcg_temp_new();
3633 gen_load_gpr(t0
, rs
);
3634 gen_load_gpr(t1
, rt
);
3637 case OPC_VR54XX_MULS
:
3638 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3640 case OPC_VR54XX_MULSU
:
3641 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3643 case OPC_VR54XX_MACC
:
3644 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3646 case OPC_VR54XX_MACCU
:
3647 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3649 case OPC_VR54XX_MSAC
:
3650 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3652 case OPC_VR54XX_MSACU
:
3653 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3655 case OPC_VR54XX_MULHI
:
3656 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3658 case OPC_VR54XX_MULHIU
:
3659 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3661 case OPC_VR54XX_MULSHI
:
3662 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3664 case OPC_VR54XX_MULSHIU
:
3665 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3667 case OPC_VR54XX_MACCHI
:
3668 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3670 case OPC_VR54XX_MACCHIU
:
3671 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3673 case OPC_VR54XX_MSACHI
:
3674 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3676 case OPC_VR54XX_MSACHIU
:
3677 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3680 MIPS_INVAL("mul vr54xx");
3681 generate_exception_end(ctx
, EXCP_RI
);
3684 gen_store_gpr(t0
, rd
);
3691 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3701 gen_load_gpr(t0
, rs
);
3706 #if defined(TARGET_MIPS64)
3710 tcg_gen_not_tl(t0
, t0
);
3719 tcg_gen_ext32u_tl(t0
, t0
);
3720 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3721 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3723 #if defined(TARGET_MIPS64)
3728 tcg_gen_clzi_i64(t0
, t0
, 64);
3734 /* Godson integer instructions */
3735 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3736 int rd
, int rs
, int rt
)
3748 case OPC_MULTU_G_2E
:
3749 case OPC_MULTU_G_2F
:
3750 #if defined(TARGET_MIPS64)
3751 case OPC_DMULT_G_2E
:
3752 case OPC_DMULT_G_2F
:
3753 case OPC_DMULTU_G_2E
:
3754 case OPC_DMULTU_G_2F
:
3756 t0
= tcg_temp_new();
3757 t1
= tcg_temp_new();
3760 t0
= tcg_temp_local_new();
3761 t1
= tcg_temp_local_new();
3765 gen_load_gpr(t0
, rs
);
3766 gen_load_gpr(t1
, rt
);
3771 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3772 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3774 case OPC_MULTU_G_2E
:
3775 case OPC_MULTU_G_2F
:
3776 tcg_gen_ext32u_tl(t0
, t0
);
3777 tcg_gen_ext32u_tl(t1
, t1
);
3778 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3779 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3784 TCGLabel
*l1
= gen_new_label();
3785 TCGLabel
*l2
= gen_new_label();
3786 TCGLabel
*l3
= gen_new_label();
3787 tcg_gen_ext32s_tl(t0
, t0
);
3788 tcg_gen_ext32s_tl(t1
, t1
);
3789 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3790 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3793 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3794 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3795 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3798 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3799 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3806 TCGLabel
*l1
= gen_new_label();
3807 TCGLabel
*l2
= gen_new_label();
3808 tcg_gen_ext32u_tl(t0
, t0
);
3809 tcg_gen_ext32u_tl(t1
, t1
);
3810 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3811 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3814 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3815 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3822 TCGLabel
*l1
= gen_new_label();
3823 TCGLabel
*l2
= gen_new_label();
3824 TCGLabel
*l3
= gen_new_label();
3825 tcg_gen_ext32u_tl(t0
, t0
);
3826 tcg_gen_ext32u_tl(t1
, t1
);
3827 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3829 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3831 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3834 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3835 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3842 TCGLabel
*l1
= gen_new_label();
3843 TCGLabel
*l2
= gen_new_label();
3844 tcg_gen_ext32u_tl(t0
, t0
);
3845 tcg_gen_ext32u_tl(t1
, t1
);
3846 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3847 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3850 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3851 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3855 #if defined(TARGET_MIPS64)
3856 case OPC_DMULT_G_2E
:
3857 case OPC_DMULT_G_2F
:
3858 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3860 case OPC_DMULTU_G_2E
:
3861 case OPC_DMULTU_G_2F
:
3862 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3867 TCGLabel
*l1
= gen_new_label();
3868 TCGLabel
*l2
= gen_new_label();
3869 TCGLabel
*l3
= gen_new_label();
3870 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3871 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3874 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3875 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3876 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3879 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3883 case OPC_DDIVU_G_2E
:
3884 case OPC_DDIVU_G_2F
:
3886 TCGLabel
*l1
= gen_new_label();
3887 TCGLabel
*l2
= gen_new_label();
3888 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3889 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3892 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3899 TCGLabel
*l1
= gen_new_label();
3900 TCGLabel
*l2
= gen_new_label();
3901 TCGLabel
*l3
= gen_new_label();
3902 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3903 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3904 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3906 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3909 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3913 case OPC_DMODU_G_2E
:
3914 case OPC_DMODU_G_2F
:
3916 TCGLabel
*l1
= gen_new_label();
3917 TCGLabel
*l2
= gen_new_label();
3918 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3919 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3922 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/* Loongson multimedia instructions */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
    uint32_t opc, shift_max;
    opc = MASK_LMI(ctx->opcode);
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);
#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);
    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);
    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);
    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);
    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);
    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);
    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);
    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);
    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);
    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);
        tcg_gen_andc_i64(t0, t1, t0);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);
        tcg_gen_shl_i64(t0, t0, t1);
        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);
        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);
        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
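/*
 * The ADD_CP2/SUB_CP2 cases below detect signed overflow with the usual
 * sign-bit trick: for an addition, overflow occurred iff both operands
 * have the same sign and the result's sign differs, which the xor/andc
 * sequence collects into the sign bit of t1 before branching around
 * generate_exception(EXCP_OVERFLOW); the subtraction case uses the
 * analogous xor/and sequence.
 */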
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, t0, rd);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        tcg_gen_lookup_and_goto_ptr();
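/*
 * Direct block chaining (goto_tb/exit_tb) is only used when the branch
 * target lies in the same guest page as the current TB and we are not
 * single-stepping; otherwise the jump goes through
 * tcg_gen_lookup_and_goto_ptr(), after raising the debug exception
 * first when single-stepping.
 */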
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes, int rs, int rt,
                                int32_t offset, int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);
    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely */
        case OPC_BLTZL:   /* 0 < 0 likely */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);
        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);
    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
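/*
 * The hflags set above record what kind of branch is pending for the
 * delay-slot handling that follows: MIPS_HFLAG_B (taken
 * unconditionally), MIPS_HFLAG_BC (conditional on bcond),
 * MIPS_HFLAG_BL (branch likely), MIPS_HFLAG_BR (target taken from a
 * register) and MIPS_HFLAG_BX (JALX-style ISA mode switch), plus the
 * delay-slot size bits BDS16/BDS32.
 */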
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
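/*
 * In the EXT-style cases above the instruction's msb field encodes
 * size - 1, hence the extract width of msb + 1; in the INS-style cases
 * it encodes the top bit position of the field, hence the deposit
 * width of msb - lsb + 1.
 */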
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x00FF00FF);
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);
        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
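/*
 * The WSBH/DSBH cases above swap the bytes within each halfword with
 * the classic mask-and-shift trick: keep the even bytes via the
 * 0x00FF00FF... mask, shift them left by 8, and OR in the odd bytes
 * shifted right by 8.  The 0x0000FFFF... block then swaps the
 * halfwords, and the final 32-bit shift pair swaps the two words.
 */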
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
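/*
 * ALIGN above concatenates rs:rt into one 64-bit value and shifts it
 * right by 8 * (4 - bp), so the low word of the result is rt shifted
 * left by bp bytes with the top bytes of rs shifted in; the DALIGN
 * variant achieves the same with plain 64-bit shifts and an OR.
 */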
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
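/*
 * MFHC0/MTHC0 access the upper half of the extended (XPA-style) CP0
 * registers: the helpers above shift or deposit at bit 32 for plain
 * 64-bit registers, and at bit 30 for EntryLo, where the extended PFN
 * part does not start at bit 32 on a 64-bit target.
 */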
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)

static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfhc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;
    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mthc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n", rn, reg, sel);

static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS32);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
#if defined(TARGET_MIPS64)
            /* Move RI/XI fields to bits 31:30 */
            tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
            tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
#if defined(TARGET_MIPS64)
            /* Move RI/XI fields to bits 31:30 */
            tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
            tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
            tcg_gen_ext32s_tl(arg, arg);
//          gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl0));
            tcg_gen_ext32s_tl(arg, arg);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl1));
            tcg_gen_ext32s_tl(arg, arg);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl2));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            tcg_gen_ext32s_tl(arg, arg);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count. BS_STOP isn't sufficient, we need to ensure
               we break completely out of translated code.  */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EBase));
            tcg_gen_ext32s_tl(arg, arg);
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_mfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//          gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
            rn = "TraceControl";
            goto cp0_unimplemented;
//          gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
            rn = "TraceControl2";
            goto cp0_unimplemented;
//          gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
            rn = "UserTraceData";
            goto cp0_unimplemented;
//          gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//          gen_helper_mfc0_performance1(arg);
            rn = "Performance1";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance2(arg);
            rn = "Performance2";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance3(arg);
            rn = "Performance3";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance4(arg);
            rn = "Performance4";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance5(arg);
            rn = "Performance5";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance6(arg);
            rn = "Performance6";
            goto cp0_unimplemented;
//          gen_helper_mfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_TagLo));
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
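/*
 * Note that every 32-bit mfc0 path above either goes through
 * gen_mfc0_load32() or adds an explicit tcg_gen_ext32s_tl(): on a
 * 64-bit target, 32-bit CP0 values are kept sign-extended in the full
 * register, as MIPS64 requires for 32-bit results.
 */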
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS32);
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_index(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_mvpcontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            goto cp0_unimplemented;
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpecontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf1(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_yqmask(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeopt(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcstatus(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcbind(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcrestart(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tchalt(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tccontext(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschedule(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschefback(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo1(cpu_env, arg);
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            gen_helper_mtc0_context(cpu_env, arg);
//          gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_helper_mtc0_pagemask(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_pagegrain(cpu_env, arg);
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_segctl0(cpu_env, arg);
            gen_helper_mtc0_segctl1(cpu_env, arg);
            gen_helper_mtc0_segctl2(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_wired(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf0(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf1(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf2(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf3(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf4(cpu_env, arg);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_hwrena(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_helper_mtc0_entryhi(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_status(cpu_env, arg);
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_intctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_cause(cpu_env, arg);
            /* Stop translation as we may have triggered an interrupt. BS_STOP
             * isn't sufficient, we need to ensure we break out of translated
             * code to check for pending interrupts.  */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_ebase(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_config0(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* ignored, read only */
            gen_helper_mtc0_config2(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config3(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config4(cpu_env, arg);
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config5(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            rn = "Invalid config selector";
            goto cp0_unimplemented;
            gen_helper_mtc0_lladdr(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maar(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maari(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            gen_helper_mtc0_xcontext(cpu_env, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mtc0_framemask(cpu_env, arg);
            goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
            gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
//          gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
//          gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl2";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//          gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
            rn = "UserTraceData";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
//          gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_helper_mtc0_performance0(cpu_env, arg);
            rn = "Performance0";
//          gen_helper_mtc0_performance1(arg);
            rn = "Performance1";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance2(arg);
            rn = "Performance2";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance3(arg);
            rn = "Performance3";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance4(arg);
            rn = "Performance4";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance5(arg);
            rn = "Performance5";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance6(arg);
            rn = "Performance6";
            goto cp0_unimplemented;
//          gen_helper_mtc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_errctl(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_taglo(cpu_env, arg);
            gen_helper_mtc0_datalo(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_taghi(cpu_env, arg);
            gen_helper_mtc0_datahi(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mtc0", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        /* BS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts.  */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
    qemu_log_mask(LOG_UNIMP, "mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
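/*
 * Most mtc0 cases above deliberately end the translation block:
 * BS_STOP when the write may have changed hflags or the execution
 * mode, and BS_EXCP (after saving the PC) when the write may raise an
 * interrupt that must be taken before the next guest instruction.
 */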
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS64);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
//          gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl0));
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl1));
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl2));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count. BS_STOP isn't sufficient, we need to ensure
               we break completely out of translated code.  */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EBase));
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_dmfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_dmfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(dmfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//          gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl2";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
            rn = "UserTraceData";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//          gen_helper_dmfc0_performance1(arg);
            rn = "Performance1";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance2(arg);
            rn = "Performance2";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance3(arg);
            rn = "Performance3";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance4(arg);
            rn = "Performance4";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance5(arg);
            rn = "Performance5";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance6(arg);
            rn = "Performance6";
            goto cp0_unimplemented;
//          gen_helper_dmfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("dmfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
7068 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7070 const char *rn
= "invalid";
7073 check_insn(ctx
, ISA_MIPS64
);
7075 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7083 gen_helper_mtc0_index(cpu_env
, arg
);
7087 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7088 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7092 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7097 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7107 goto cp0_unimplemented
;
7117 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7118 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7122 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7123 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7127 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7128 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7132 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7133 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7137 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7138 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7142 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7143 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7144 rn
= "VPEScheFBack";
7147 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7148 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7152 goto cp0_unimplemented
;
7158 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7163 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7168 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7172 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7173 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7177 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7178 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7182 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7183 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7187 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7188 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7192 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7193 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7197 goto cp0_unimplemented
;
7203 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7209 rn
= "GlobalNumber";
7212 goto cp0_unimplemented
;
7218 gen_helper_mtc0_context(cpu_env
, arg
);
7222 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7223 rn
= "ContextConfig";
7224 goto cp0_unimplemented
;
7226 CP0_CHECK(ctx
->ulri
);
7227 tcg_gen_st_tl(arg
, cpu_env
,
7228 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7232 goto cp0_unimplemented
;
7238 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7242 check_insn(ctx
, ISA_MIPS32R2
);
7243 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7248 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7253 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7258 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7262 goto cp0_unimplemented
;
7268 gen_helper_mtc0_wired(cpu_env
, arg
);
7272 check_insn(ctx
, ISA_MIPS32R2
);
7273 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7277 check_insn(ctx
, ISA_MIPS32R2
);
7278 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7282 check_insn(ctx
, ISA_MIPS32R2
);
7283 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7287 check_insn(ctx
, ISA_MIPS32R2
);
7288 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7292 check_insn(ctx
, ISA_MIPS32R2
);
7293 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7297 goto cp0_unimplemented
;
7303 check_insn(ctx
, ISA_MIPS32R2
);
7304 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7305 ctx
->bstate
= BS_STOP
;
7309 goto cp0_unimplemented
;
7327 goto cp0_unimplemented
;
7333 gen_helper_mtc0_count(cpu_env
, arg
);
7336 /* 6,7 are implementation dependent */
7338 goto cp0_unimplemented
;
7340 /* Stop translation as we may have switched the execution mode */
7341 ctx
->bstate
= BS_STOP
;
7346 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7350 goto cp0_unimplemented
;
7356 gen_helper_mtc0_compare(cpu_env
, arg
);
7359 /* 6,7 are implementation dependent */
7361 goto cp0_unimplemented
;
7363 /* Stop translation as we may have switched the execution mode */
7364 ctx
->bstate
= BS_STOP
;
7369 save_cpu_state(ctx
, 1);
7370 gen_helper_mtc0_status(cpu_env
, arg
);
7371 /* BS_STOP isn't good enough here, hflags may have changed. */
7372 gen_save_pc(ctx
->pc
+ 4);
7373 ctx
->bstate
= BS_EXCP
;
7377 check_insn(ctx
, ISA_MIPS32R2
);
7378 gen_helper_mtc0_intctl(cpu_env
, arg
);
7379 /* Stop translation as we may have switched the execution mode */
7380 ctx
->bstate
= BS_STOP
;
7384 check_insn(ctx
, ISA_MIPS32R2
);
7385 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7386 /* Stop translation as we may have switched the execution mode */
7387 ctx
->bstate
= BS_STOP
;
7391 check_insn(ctx
, ISA_MIPS32R2
);
7392 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7393 /* Stop translation as we may have switched the execution mode */
7394 ctx
->bstate
= BS_STOP
;
7398 goto cp0_unimplemented
;
7404 save_cpu_state(ctx
, 1);
7405 gen_helper_mtc0_cause(cpu_env
, arg
);
7406 /* Stop translation as we may have triggered an intetrupt. BS_STOP
7407 * isn't sufficient, we need to ensure we break out of translated
7408 * code to check for pending interrupts. */
7409 gen_save_pc(ctx
->pc
+ 4);
7410 ctx
->bstate
= BS_EXCP
;
7414 goto cp0_unimplemented
;
7420 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7424 goto cp0_unimplemented
;
7434 check_insn(ctx
, ISA_MIPS32R2
);
7435 gen_helper_mtc0_ebase(cpu_env
, arg
);
7439 goto cp0_unimplemented
;
7445 gen_helper_mtc0_config0(cpu_env
, arg
);
7447 /* Stop translation as we may have switched the execution mode */
7448 ctx
->bstate
= BS_STOP
;
7451 /* ignored, read only */
7455 gen_helper_mtc0_config2(cpu_env
, arg
);
7457 /* Stop translation as we may have switched the execution mode */
7458 ctx
->bstate
= BS_STOP
;
7461 gen_helper_mtc0_config3(cpu_env
, arg
);
7463 /* Stop translation as we may have switched the execution mode */
7464 ctx
->bstate
= BS_STOP
;
7467 /* currently ignored */
7471 gen_helper_mtc0_config5(cpu_env
, arg
);
7473 /* Stop translation as we may have switched the execution mode */
7474 ctx
->bstate
= BS_STOP
;
7476 /* 6,7 are implementation dependent */
7478 rn
= "Invalid config selector";
7479 goto cp0_unimplemented
;
7485 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7489 CP0_CHECK(ctx
->mrp
);
7490 gen_helper_mtc0_maar(cpu_env
, arg
);
7494 CP0_CHECK(ctx
->mrp
);
7495 gen_helper_mtc0_maari(cpu_env
, arg
);
7499 goto cp0_unimplemented
;
7505 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7509 goto cp0_unimplemented
;
7515 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7519 goto cp0_unimplemented
;
7525 check_insn(ctx
, ISA_MIPS3
);
7526 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7530 goto cp0_unimplemented
;
7534 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7535 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7538 gen_helper_mtc0_framemask(cpu_env
, arg
);
7542 goto cp0_unimplemented
;
7547 rn
= "Diagnostic"; /* implementation dependent */
7552 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7553 /* BS_STOP isn't good enough here, hflags may have changed. */
7554 gen_save_pc(ctx
->pc
+ 4);
7555 ctx
->bstate
= BS_EXCP
;
7559 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7560 /* Stop translation as we may have switched the execution mode */
7561 ctx
->bstate
= BS_STOP
;
7562 rn
= "TraceControl";
7563 goto cp0_unimplemented
;
7565 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7566 /* Stop translation as we may have switched the execution mode */
7567 ctx
->bstate
= BS_STOP
;
7568 rn
= "TraceControl2";
7569 goto cp0_unimplemented
;
7571 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7572 /* Stop translation as we may have switched the execution mode */
7573 ctx
->bstate
= BS_STOP
;
7574 rn
= "UserTraceData";
7575 goto cp0_unimplemented
;
7577 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7578 /* Stop translation as we may have switched the execution mode */
7579 ctx
->bstate
= BS_STOP
;
7581 goto cp0_unimplemented
;
7583 goto cp0_unimplemented
;
7590 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7594 goto cp0_unimplemented
;
7600 gen_helper_mtc0_performance0(cpu_env
, arg
);
7601 rn
= "Performance0";
7604 // gen_helper_mtc0_performance1(cpu_env, arg);
7605 rn
= "Performance1";
7606 goto cp0_unimplemented
;
7608 // gen_helper_mtc0_performance2(cpu_env, arg);
7609 rn
= "Performance2";
7610 goto cp0_unimplemented
;
7612 // gen_helper_mtc0_performance3(cpu_env, arg);
7613 rn
= "Performance3";
7614 goto cp0_unimplemented
;
7616 // gen_helper_mtc0_performance4(cpu_env, arg);
7617 rn
= "Performance4";
7618 goto cp0_unimplemented
;
7620 // gen_helper_mtc0_performance5(cpu_env, arg);
7621 rn
= "Performance5";
7622 goto cp0_unimplemented
;
7624 // gen_helper_mtc0_performance6(cpu_env, arg);
7625 rn
= "Performance6";
7626 goto cp0_unimplemented
;
7628 // gen_helper_mtc0_performance7(cpu_env, arg);
7629 rn
= "Performance7";
7630 goto cp0_unimplemented
;
7632 goto cp0_unimplemented
;
7638 gen_helper_mtc0_errctl(cpu_env
, arg
);
7639 ctx
->bstate
= BS_STOP
;
7643 goto cp0_unimplemented
;
7653 goto cp0_unimplemented
;
7662 gen_helper_mtc0_taglo(cpu_env
, arg
);
7669 gen_helper_mtc0_datalo(cpu_env
, arg
);
7673 goto cp0_unimplemented
;
7682 gen_helper_mtc0_taghi(cpu_env
, arg
);
7689 gen_helper_mtc0_datahi(cpu_env
, arg
);
7694 goto cp0_unimplemented
;
7700 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7704 goto cp0_unimplemented
;
7711 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7715 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7716 tcg_gen_st_tl(arg
, cpu_env
,
7717 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7721 goto cp0_unimplemented
;
7725 goto cp0_unimplemented
;
    trace_mips_translate_c0("dmtc0", rn, reg, sel);

    /* For simplicity assume that all writes can cause interrupts. */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        /* BS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
    }

    qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
#endif /* TARGET_MIPS64 */
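/*
 * Sketch of the pattern used by the CP0 write paths above (illustrative
 * only, not part of the original code): any helper that may change hflags
 * or raise an interrupt ends the current translation block, e.g.
 *
 *     gen_helper_mtc0_cause(cpu_env, arg);   // may trigger an interrupt
 *     gen_save_pc(ctx->pc + 4);              // resume after this insn
 *     ctx->bstate = BS_EXCP;                 // leave translated code so
 *                                            // pending IRQs are checked
 */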
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
7762 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7765 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7775 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7778 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7781 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7784 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7787 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7790 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7793 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7796 gen_mfc0(ctx
, t0
, rt
, sel
);
7803 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7806 gen_mfc0(ctx
, t0
, rt
, sel
);
7812 gen_helper_mftc0_status(t0
, cpu_env
);
7815 gen_mfc0(ctx
, t0
, rt
, sel
);
7821 gen_helper_mftc0_cause(t0
, cpu_env
);
7831 gen_helper_mftc0_epc(t0
, cpu_env
);
7841 gen_helper_mftc0_ebase(t0
, cpu_env
);
7851 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7861 gen_helper_mftc0_debug(t0
, cpu_env
);
7864 gen_mfc0(ctx
, t0
, rt
, sel
);
7869 gen_mfc0(ctx
, t0
, rt
, sel
);
7871 } else switch (sel
) {
7872 /* GPR registers. */
7874 gen_helper_1e0i(mftgpr
, t0
, rt
);
7876 /* Auxiliary CPU registers */
7880 gen_helper_1e0i(mftlo
, t0
, 0);
7883 gen_helper_1e0i(mfthi
, t0
, 0);
7886 gen_helper_1e0i(mftacx
, t0
, 0);
7889 gen_helper_1e0i(mftlo
, t0
, 1);
7892 gen_helper_1e0i(mfthi
, t0
, 1);
7895 gen_helper_1e0i(mftacx
, t0
, 1);
7898 gen_helper_1e0i(mftlo
, t0
, 2);
7901 gen_helper_1e0i(mfthi
, t0
, 2);
7904 gen_helper_1e0i(mftacx
, t0
, 2);
7907 gen_helper_1e0i(mftlo
, t0
, 3);
7910 gen_helper_1e0i(mfthi
, t0
, 3);
7913 gen_helper_1e0i(mftacx
, t0
, 3);
7916 gen_helper_mftdsp(t0
, cpu_env
);
7922 /* Floating point (COP1). */
7924 /* XXX: For now we support only a single FPU context. */
7926 TCGv_i32 fp0
= tcg_temp_new_i32();
7928 gen_load_fpr32(ctx
, fp0
, rt
);
7929 tcg_gen_ext_i32_tl(t0
, fp0
);
7930 tcg_temp_free_i32(fp0
);
7932 TCGv_i32 fp0
= tcg_temp_new_i32();
7934 gen_load_fpr32h(ctx
, fp0
, rt
);
7935 tcg_gen_ext_i32_tl(t0
, fp0
);
7936 tcg_temp_free_i32(fp0
);
7940 /* XXX: For now we support only a single FPU context. */
7941 gen_helper_1e0i(cfc1
, t0
, rt
);
7943 /* COP2: Not implemented. */
7950 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7951 gen_store_gpr(t0
, rd
);
7957 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7958 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        /* NOP */ ;
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        /* NOP */ ;
7980 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7983 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7993 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7996 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7999 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8002 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8005 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8008 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8011 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8014 gen_mtc0(ctx
, t0
, rd
, sel
);
8021 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8024 gen_mtc0(ctx
, t0
, rd
, sel
);
8030 gen_helper_mttc0_status(cpu_env
, t0
);
8033 gen_mtc0(ctx
, t0
, rd
, sel
);
8039 gen_helper_mttc0_cause(cpu_env
, t0
);
8049 gen_helper_mttc0_ebase(cpu_env
, t0
);
8059 gen_helper_mttc0_debug(cpu_env
, t0
);
8062 gen_mtc0(ctx
, t0
, rd
, sel
);
8067 gen_mtc0(ctx
, t0
, rd
, sel
);
8069 } else switch (sel
) {
8070 /* GPR registers. */
8072 gen_helper_0e1i(mttgpr
, t0
, rd
);
8074 /* Auxiliary CPU registers */
8078 gen_helper_0e1i(mttlo
, t0
, 0);
8081 gen_helper_0e1i(mtthi
, t0
, 0);
8084 gen_helper_0e1i(mttacx
, t0
, 0);
8087 gen_helper_0e1i(mttlo
, t0
, 1);
8090 gen_helper_0e1i(mtthi
, t0
, 1);
8093 gen_helper_0e1i(mttacx
, t0
, 1);
8096 gen_helper_0e1i(mttlo
, t0
, 2);
8099 gen_helper_0e1i(mtthi
, t0
, 2);
8102 gen_helper_0e1i(mttacx
, t0
, 2);
8105 gen_helper_0e1i(mttlo
, t0
, 3);
8108 gen_helper_0e1i(mtthi
, t0
, 3);
8111 gen_helper_0e1i(mttacx
, t0
, 3);
8114 gen_helper_mttdsp(cpu_env
, t0
);
8120 /* Floating point (COP1). */
8122 /* XXX: For now we support only a single FPU context. */
8124 TCGv_i32 fp0
= tcg_temp_new_i32();
8126 tcg_gen_trunc_tl_i32(fp0
, t0
);
8127 gen_store_fpr32(ctx
, fp0
, rd
);
8128 tcg_temp_free_i32(fp0
);
8130 TCGv_i32 fp0
= tcg_temp_new_i32();
8132 tcg_gen_trunc_tl_i32(fp0
, t0
);
8133 gen_store_fpr32h(ctx
, fp0
, rd
);
8134 tcg_temp_free_i32(fp0
);
8138 /* XXX: For now we support only a single FPU context. */
8140 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8142 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8143 tcg_temp_free_i32(fs_tmp
);
8145 /* Stop translation as we may have changed hflags */
8146 ctx
->bstate
= BS_STOP
;
8148 /* COP2: Not implemented. */
8155 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8161 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8162 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
{
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
8176 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8181 TCGv t0
= tcg_temp_new();
8183 gen_load_gpr(t0
, rt
);
8184 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8189 #if defined(TARGET_MIPS64)
8191 check_insn(ctx
, ISA_MIPS3
);
8196 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8200 check_insn(ctx
, ISA_MIPS3
);
8202 TCGv t0
= tcg_temp_new();
8204 gen_load_gpr(t0
, rt
);
8205 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8217 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8223 TCGv t0
= tcg_temp_new();
8224 gen_load_gpr(t0
, rt
);
8225 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8231 check_insn(ctx
, ASE_MT
);
8236 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8237 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8241 check_insn(ctx
, ASE_MT
);
8242 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8243 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8248 if (!env
->tlb
->helper_tlbwi
)
8250 gen_helper_tlbwi(cpu_env
);
8255 if (!env
->tlb
->helper_tlbinv
) {
8258 gen_helper_tlbinv(cpu_env
);
8259 } /* treat as nop if TLBINV not supported */
8264 if (!env
->tlb
->helper_tlbinvf
) {
8267 gen_helper_tlbinvf(cpu_env
);
8268 } /* treat as nop if TLBINV not supported */
8272 if (!env
->tlb
->helper_tlbwr
)
8274 gen_helper_tlbwr(cpu_env
);
8278 if (!env
->tlb
->helper_tlbp
)
8280 gen_helper_tlbp(cpu_env
);
8284 if (!env
->tlb
->helper_tlbr
)
8286 gen_helper_tlbr(cpu_env
);
8288 case OPC_ERET
: /* OPC_ERETNC */
8289 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8290 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8293 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8294 if (ctx
->opcode
& (1 << bit_shift
)) {
8297 check_insn(ctx
, ISA_MIPS32R5
);
8298 gen_helper_eretnc(cpu_env
);
8302 check_insn(ctx
, ISA_MIPS2
);
8303 gen_helper_eret(cpu_env
);
8305 ctx
->bstate
= BS_EXCP
;
8310 check_insn(ctx
, ISA_MIPS32
);
8311 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8312 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8315 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8317 generate_exception_end(ctx
, EXCP_RI
);
8319 gen_helper_deret(cpu_env
);
8320 ctx
->bstate
= BS_EXCP
;
8325 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8326 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8327 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8330 /* If we get an exception, we want to restart at next instruction */
8332 save_cpu_state(ctx
, 1);
8334 gen_helper_wait(cpu_env
);
8335 ctx
->bstate
= BS_EXCP
;
8340 generate_exception_end(ctx
, EXCP_RI
);
8343 (void)opn
; /* avoid a compiler warning */
8345 #endif /* !CONFIG_USER_ONLY */
8347 /* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();
8354 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8355 generate_exception_end(ctx
, EXCP_RI
);
8360 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8362 btarget
= ctx
->pc
+ 4 + offset
;
8366 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8367 tcg_gen_not_i32(t0
, t0
);
8368 tcg_gen_andi_i32(t0
, t0
, 1);
8369 tcg_gen_extu_i32_tl(bcond
, t0
);
8372 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8373 tcg_gen_not_i32(t0
, t0
);
8374 tcg_gen_andi_i32(t0
, t0
, 1);
8375 tcg_gen_extu_i32_tl(bcond
, t0
);
8378 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8379 tcg_gen_andi_i32(t0
, t0
, 1);
8380 tcg_gen_extu_i32_tl(bcond
, t0
);
8383 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8384 tcg_gen_andi_i32(t0
, t0
, 1);
8385 tcg_gen_extu_i32_tl(bcond
, t0
);
8387 ctx
->hflags
|= MIPS_HFLAG_BL
;
8391 TCGv_i32 t1
= tcg_temp_new_i32();
8392 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8393 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8394 tcg_gen_nand_i32(t0
, t0
, t1
);
8395 tcg_temp_free_i32(t1
);
8396 tcg_gen_andi_i32(t0
, t0
, 1);
8397 tcg_gen_extu_i32_tl(bcond
, t0
);
8402 TCGv_i32 t1
= tcg_temp_new_i32();
8403 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8404 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8405 tcg_gen_or_i32(t0
, t0
, t1
);
8406 tcg_temp_free_i32(t1
);
8407 tcg_gen_andi_i32(t0
, t0
, 1);
8408 tcg_gen_extu_i32_tl(bcond
, t0
);
8413 TCGv_i32 t1
= tcg_temp_new_i32();
8414 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8415 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8416 tcg_gen_and_i32(t0
, t0
, t1
);
8417 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8418 tcg_gen_and_i32(t0
, t0
, t1
);
8419 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8420 tcg_gen_nand_i32(t0
, t0
, t1
);
8421 tcg_temp_free_i32(t1
);
8422 tcg_gen_andi_i32(t0
, t0
, 1);
8423 tcg_gen_extu_i32_tl(bcond
, t0
);
8428 TCGv_i32 t1
= tcg_temp_new_i32();
8429 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8430 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8431 tcg_gen_or_i32(t0
, t0
, t1
);
8432 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8433 tcg_gen_or_i32(t0
, t0
, t1
);
8434 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8435 tcg_gen_or_i32(t0
, t0
, t1
);
8436 tcg_temp_free_i32(t1
);
8437 tcg_gen_andi_i32(t0
, t0
, 1);
8438 tcg_gen_extu_i32_tl(bcond
, t0
);
8441 ctx
->hflags
|= MIPS_HFLAG_BC
;
8444 MIPS_INVAL("cp1 cond branch");
8445 generate_exception_end(ctx
, EXCP_RI
);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;

    tcg_temp_free_i32(t0);
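/*
 * Illustrative note (an assumption about usage, not part of the original
 * code): gen_compute_branch1() only evaluates the FP condition into
 * `bcond` and records the target in ctx->btarget; the jump itself is
 * emitted later from the MIPS_HFLAG_BC/BL state in ctx->hflags.  Rough
 * usage:
 *
 *     gen_compute_branch1(ctx, op, cc, offset);  // op: a CP1 branch like BC1T
 *     // ... translate the delay-slot instruction ...
 *     // the generic branch epilogue then tests bcond and either jumps to
 *     // ctx->btarget or falls through past the delay slot.
 */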
8454 /* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();
8462 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8463 #ifdef MIPS_DEBUG_DISAS
8464 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8467 generate_exception_end(ctx
, EXCP_RI
);
8471 gen_load_fpr64(ctx
, t0
, ft
);
8472 tcg_gen_andi_i64(t0
, t0
, 1);
8474 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8478 tcg_gen_xori_i64(t0
, t0
, 1);
8479 ctx
->hflags
|= MIPS_HFLAG_BC
;
8482 /* t0 already set */
8483 ctx
->hflags
|= MIPS_HFLAG_BC
;
8486 MIPS_INVAL("cp1 cond branch");
8487 generate_exception_end(ctx
, EXCP_RI
);
8491 tcg_gen_trunc_i64_tl(bcond
, t0
);
    ctx->btarget = btarget;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

    tcg_temp_free_i64(t0);
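/*
 * Note (illustrative): unlike the pre-R6 path above, the R6 variant takes
 * its condition from bit 0 of FPR ft (the BC1EQZ/BC1NEZ branches) rather
 * than from the FCR31 condition-code bits, which is why it loads the
 * register with gen_load_fpr64() and masks it with
 * tcg_gen_andi_i64(t0, t0, 1).
 */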
8508 /* Coprocessor 1 (FPU) */
8510 #define FOP(func, fmt) (((fmt) << 21) | (func))
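/*
 * Worked example (derived from the macro above, for illustration only):
 * FOP(func, fmt) packs the CP1 format field (bits 25..21 of the
 * instruction) together with the function field (bits 5..0).  So
 * OPC_ADD_S = FOP(0, FMT_S) below denotes the single-precision ADD.fmt
 * operation within the OPC_CP1 major opcode.
 */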
8513 OPC_ADD_S
= FOP(0, FMT_S
),
8514 OPC_SUB_S
= FOP(1, FMT_S
),
8515 OPC_MUL_S
= FOP(2, FMT_S
),
8516 OPC_DIV_S
= FOP(3, FMT_S
),
8517 OPC_SQRT_S
= FOP(4, FMT_S
),
8518 OPC_ABS_S
= FOP(5, FMT_S
),
8519 OPC_MOV_S
= FOP(6, FMT_S
),
8520 OPC_NEG_S
= FOP(7, FMT_S
),
8521 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8522 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8523 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8524 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8525 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8526 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8527 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8528 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8529 OPC_SEL_S
= FOP(16, FMT_S
),
8530 OPC_MOVCF_S
= FOP(17, FMT_S
),
8531 OPC_MOVZ_S
= FOP(18, FMT_S
),
8532 OPC_MOVN_S
= FOP(19, FMT_S
),
8533 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8534 OPC_RECIP_S
= FOP(21, FMT_S
),
8535 OPC_RSQRT_S
= FOP(22, FMT_S
),
8536 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8537 OPC_MADDF_S
= FOP(24, FMT_S
),
8538 OPC_MSUBF_S
= FOP(25, FMT_S
),
8539 OPC_RINT_S
= FOP(26, FMT_S
),
8540 OPC_CLASS_S
= FOP(27, FMT_S
),
8541 OPC_MIN_S
= FOP(28, FMT_S
),
8542 OPC_RECIP2_S
= FOP(28, FMT_S
),
8543 OPC_MINA_S
= FOP(29, FMT_S
),
8544 OPC_RECIP1_S
= FOP(29, FMT_S
),
8545 OPC_MAX_S
= FOP(30, FMT_S
),
8546 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8547 OPC_MAXA_S
= FOP(31, FMT_S
),
8548 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8549 OPC_CVT_D_S
= FOP(33, FMT_S
),
8550 OPC_CVT_W_S
= FOP(36, FMT_S
),
8551 OPC_CVT_L_S
= FOP(37, FMT_S
),
8552 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8553 OPC_CMP_F_S
= FOP (48, FMT_S
),
8554 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8555 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8556 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8557 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8558 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8559 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8560 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8561 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8562 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8563 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8564 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8565 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8566 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8567 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8568 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8570 OPC_ADD_D
= FOP(0, FMT_D
),
8571 OPC_SUB_D
= FOP(1, FMT_D
),
8572 OPC_MUL_D
= FOP(2, FMT_D
),
8573 OPC_DIV_D
= FOP(3, FMT_D
),
8574 OPC_SQRT_D
= FOP(4, FMT_D
),
8575 OPC_ABS_D
= FOP(5, FMT_D
),
8576 OPC_MOV_D
= FOP(6, FMT_D
),
8577 OPC_NEG_D
= FOP(7, FMT_D
),
8578 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8579 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8580 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8581 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8582 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8583 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8584 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8585 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8586 OPC_SEL_D
= FOP(16, FMT_D
),
8587 OPC_MOVCF_D
= FOP(17, FMT_D
),
8588 OPC_MOVZ_D
= FOP(18, FMT_D
),
8589 OPC_MOVN_D
= FOP(19, FMT_D
),
8590 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8591 OPC_RECIP_D
= FOP(21, FMT_D
),
8592 OPC_RSQRT_D
= FOP(22, FMT_D
),
8593 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8594 OPC_MADDF_D
= FOP(24, FMT_D
),
8595 OPC_MSUBF_D
= FOP(25, FMT_D
),
8596 OPC_RINT_D
= FOP(26, FMT_D
),
8597 OPC_CLASS_D
= FOP(27, FMT_D
),
8598 OPC_MIN_D
= FOP(28, FMT_D
),
8599 OPC_RECIP2_D
= FOP(28, FMT_D
),
8600 OPC_MINA_D
= FOP(29, FMT_D
),
8601 OPC_RECIP1_D
= FOP(29, FMT_D
),
8602 OPC_MAX_D
= FOP(30, FMT_D
),
8603 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8604 OPC_MAXA_D
= FOP(31, FMT_D
),
8605 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8606 OPC_CVT_S_D
= FOP(32, FMT_D
),
8607 OPC_CVT_W_D
= FOP(36, FMT_D
),
8608 OPC_CVT_L_D
= FOP(37, FMT_D
),
8609 OPC_CMP_F_D
= FOP (48, FMT_D
),
8610 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8611 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8612 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8613 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8614 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8615 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8616 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8617 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8618 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8619 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8620 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8621 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8622 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8623 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8624 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8626 OPC_CVT_S_W
= FOP(32, FMT_W
),
8627 OPC_CVT_D_W
= FOP(33, FMT_W
),
8628 OPC_CVT_S_L
= FOP(32, FMT_L
),
8629 OPC_CVT_D_L
= FOP(33, FMT_L
),
8630 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8632 OPC_ADD_PS
= FOP(0, FMT_PS
),
8633 OPC_SUB_PS
= FOP(1, FMT_PS
),
8634 OPC_MUL_PS
= FOP(2, FMT_PS
),
8635 OPC_DIV_PS
= FOP(3, FMT_PS
),
8636 OPC_ABS_PS
= FOP(5, FMT_PS
),
8637 OPC_MOV_PS
= FOP(6, FMT_PS
),
8638 OPC_NEG_PS
= FOP(7, FMT_PS
),
8639 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8640 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8641 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8642 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8643 OPC_MULR_PS
= FOP(26, FMT_PS
),
8644 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8645 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8646 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8647 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8649 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8650 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8651 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8652 OPC_PLL_PS
= FOP(44, FMT_PS
),
8653 OPC_PLU_PS
= FOP(45, FMT_PS
),
8654 OPC_PUL_PS
= FOP(46, FMT_PS
),
8655 OPC_PUU_PS
= FOP(47, FMT_PS
),
8656 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8657 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8658 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8659 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8660 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8661 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8662 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8663 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8664 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8665 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8666 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8667 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8668 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8669 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8670 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8671 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8675 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8676 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8677 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8678 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8679 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8680 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8681 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8682 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8683 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8684 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8685 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8686 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8687 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8688 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8689 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8690 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8691 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8692 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8693 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8694 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8695 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8696 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8698 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8699 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8700 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8701 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8702 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8703 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8704 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8705 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8706 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8707 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8708 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8709 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8710 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8711 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8712 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8713 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8714 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8715 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8716 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8717 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8718 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8719 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
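/*
 * Note (illustrative): the R6 CMP.cond.fmt entries above reuse FOP() with
 * FMT_W standing in for single precision and FMT_L for double precision,
 * since those are the format-field values the R6 compare instructions
 * encode.
 */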
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();
8728 TCGv_i32 fp0
= tcg_temp_new_i32();
8730 gen_load_fpr32(ctx
, fp0
, fs
);
8731 tcg_gen_ext_i32_tl(t0
, fp0
);
8732 tcg_temp_free_i32(fp0
);
8734 gen_store_gpr(t0
, rt
);
8737 gen_load_gpr(t0
, rt
);
8739 TCGv_i32 fp0
= tcg_temp_new_i32();
8741 tcg_gen_trunc_tl_i32(fp0
, t0
);
8742 gen_store_fpr32(ctx
, fp0
, fs
);
8743 tcg_temp_free_i32(fp0
);
8747 gen_helper_1e0i(cfc1
, t0
, fs
);
8748 gen_store_gpr(t0
, rt
);
8751 gen_load_gpr(t0
, rt
);
8752 save_cpu_state(ctx
, 0);
8754 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8756 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8757 tcg_temp_free_i32(fs_tmp
);
8759 /* Stop translation as we may have changed hflags */
8760 ctx
->bstate
= BS_STOP
;
8762 #if defined(TARGET_MIPS64)
8764 gen_load_fpr64(ctx
, t0
, fs
);
8765 gen_store_gpr(t0
, rt
);
8768 gen_load_gpr(t0
, rt
);
8769 gen_store_fpr64(ctx
, t0
, fs
);
8774 TCGv_i32 fp0
= tcg_temp_new_i32();
8776 gen_load_fpr32h(ctx
, fp0
, fs
);
8777 tcg_gen_ext_i32_tl(t0
, fp0
);
8778 tcg_temp_free_i32(fp0
);
8780 gen_store_gpr(t0
, rt
);
8783 gen_load_gpr(t0
, rt
);
8785 TCGv_i32 fp0
= tcg_temp_new_i32();
8787 tcg_gen_trunc_tl_i32(fp0
, t0
);
8788 gen_store_fpr32h(ctx
, fp0
, fs
);
8789 tcg_temp_free_i32(fp0
);
8793 MIPS_INVAL("cp1 move");
8794 generate_exception_end(ctx
, EXCP_RI
);
8802 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8818 l1
= gen_new_label();
8819 t0
= tcg_temp_new_i32();
8820 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8821 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8822 tcg_temp_free_i32(t0
);
8824 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8826 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_S:
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
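/*
 * Reading of the generated TCG above (an illustration, not a statement of
 * the architecture manual):
 *   SEL.S    -> fd = (fd & 1) ? ft : fs
 *   SELEQZ.S -> fd = (ft & 1) ? 0  : fs
 *   SELNEZ.S -> fd = (ft & 1) ? fs : 0
 * gen_sel_d() below emits the same patterns on 64-bit values.
 */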
8937 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8940 TCGv_i64 t1
= tcg_const_i64(0);
8941 TCGv_i64 fp0
= tcg_temp_new_i64();
8942 TCGv_i64 fp1
= tcg_temp_new_i64();
8943 TCGv_i64 fp2
= tcg_temp_new_i64();
8944 gen_load_fpr64(ctx
, fp0
, fd
);
8945 gen_load_fpr64(ctx
, fp1
, ft
);
8946 gen_load_fpr64(ctx
, fp2
, fs
);
8950 tcg_gen_andi_i64(fp0
, fp0
, 1);
8951 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8954 tcg_gen_andi_i64(fp1
, fp1
, 1);
8955 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8958 tcg_gen_andi_i64(fp1
, fp1
, 1);
8959 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8962 MIPS_INVAL("gen_sel_d");
8963 generate_exception_end(ctx
, EXCP_RI
);
8967 gen_store_fpr64(ctx
, fp0
, fd
);
8968 tcg_temp_free_i64(fp2
);
8969 tcg_temp_free_i64(fp1
);
8970 tcg_temp_free_i64(fp0
);
8971 tcg_temp_free_i64(t1
);
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
{
    uint32_t func = ctx->opcode & 0x3f;
8981 TCGv_i32 fp0
= tcg_temp_new_i32();
8982 TCGv_i32 fp1
= tcg_temp_new_i32();
8984 gen_load_fpr32(ctx
, fp0
, fs
);
8985 gen_load_fpr32(ctx
, fp1
, ft
);
8986 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8987 tcg_temp_free_i32(fp1
);
8988 gen_store_fpr32(ctx
, fp0
, fd
);
8989 tcg_temp_free_i32(fp0
);
8994 TCGv_i32 fp0
= tcg_temp_new_i32();
8995 TCGv_i32 fp1
= tcg_temp_new_i32();
8997 gen_load_fpr32(ctx
, fp0
, fs
);
8998 gen_load_fpr32(ctx
, fp1
, ft
);
8999 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9000 tcg_temp_free_i32(fp1
);
9001 gen_store_fpr32(ctx
, fp0
, fd
);
9002 tcg_temp_free_i32(fp0
);
9007 TCGv_i32 fp0
= tcg_temp_new_i32();
9008 TCGv_i32 fp1
= tcg_temp_new_i32();
9010 gen_load_fpr32(ctx
, fp0
, fs
);
9011 gen_load_fpr32(ctx
, fp1
, ft
);
9012 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9013 tcg_temp_free_i32(fp1
);
9014 gen_store_fpr32(ctx
, fp0
, fd
);
9015 tcg_temp_free_i32(fp0
);
9020 TCGv_i32 fp0
= tcg_temp_new_i32();
9021 TCGv_i32 fp1
= tcg_temp_new_i32();
9023 gen_load_fpr32(ctx
, fp0
, fs
);
9024 gen_load_fpr32(ctx
, fp1
, ft
);
9025 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9026 tcg_temp_free_i32(fp1
);
9027 gen_store_fpr32(ctx
, fp0
, fd
);
9028 tcg_temp_free_i32(fp0
);
9033 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 gen_load_fpr32(ctx
, fp0
, fs
);
9036 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9037 gen_store_fpr32(ctx
, fp0
, fd
);
9038 tcg_temp_free_i32(fp0
);
9043 TCGv_i32 fp0
= tcg_temp_new_i32();
9045 gen_load_fpr32(ctx
, fp0
, fs
);
9047 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9049 gen_helper_float_abs_s(fp0
, fp0
);
9051 gen_store_fpr32(ctx
, fp0
, fd
);
9052 tcg_temp_free_i32(fp0
);
9057 TCGv_i32 fp0
= tcg_temp_new_i32();
9059 gen_load_fpr32(ctx
, fp0
, fs
);
9060 gen_store_fpr32(ctx
, fp0
, fd
);
9061 tcg_temp_free_i32(fp0
);
9066 TCGv_i32 fp0
= tcg_temp_new_i32();
9068 gen_load_fpr32(ctx
, fp0
, fs
);
9070 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9072 gen_helper_float_chs_s(fp0
, fp0
);
9074 gen_store_fpr32(ctx
, fp0
, fd
);
9075 tcg_temp_free_i32(fp0
);
9079 check_cp1_64bitmode(ctx
);
9081 TCGv_i32 fp32
= tcg_temp_new_i32();
9082 TCGv_i64 fp64
= tcg_temp_new_i64();
9084 gen_load_fpr32(ctx
, fp32
, fs
);
9086 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9088 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9090 tcg_temp_free_i32(fp32
);
9091 gen_store_fpr64(ctx
, fp64
, fd
);
9092 tcg_temp_free_i64(fp64
);
9096 check_cp1_64bitmode(ctx
);
9098 TCGv_i32 fp32
= tcg_temp_new_i32();
9099 TCGv_i64 fp64
= tcg_temp_new_i64();
9101 gen_load_fpr32(ctx
, fp32
, fs
);
9103 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9105 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9107 tcg_temp_free_i32(fp32
);
9108 gen_store_fpr64(ctx
, fp64
, fd
);
9109 tcg_temp_free_i64(fp64
);
9113 check_cp1_64bitmode(ctx
);
9115 TCGv_i32 fp32
= tcg_temp_new_i32();
9116 TCGv_i64 fp64
= tcg_temp_new_i64();
9118 gen_load_fpr32(ctx
, fp32
, fs
);
9120 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9122 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9124 tcg_temp_free_i32(fp32
);
9125 gen_store_fpr64(ctx
, fp64
, fd
);
9126 tcg_temp_free_i64(fp64
);
9130 check_cp1_64bitmode(ctx
);
9132 TCGv_i32 fp32
= tcg_temp_new_i32();
9133 TCGv_i64 fp64
= tcg_temp_new_i64();
9135 gen_load_fpr32(ctx
, fp32
, fs
);
9137 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9139 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9141 tcg_temp_free_i32(fp32
);
9142 gen_store_fpr64(ctx
, fp64
, fd
);
9143 tcg_temp_free_i64(fp64
);
9148 TCGv_i32 fp0
= tcg_temp_new_i32();
9150 gen_load_fpr32(ctx
, fp0
, fs
);
9152 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9154 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9156 gen_store_fpr32(ctx
, fp0
, fd
);
9157 tcg_temp_free_i32(fp0
);
9162 TCGv_i32 fp0
= tcg_temp_new_i32();
9164 gen_load_fpr32(ctx
, fp0
, fs
);
9166 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9168 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9170 gen_store_fpr32(ctx
, fp0
, fd
);
9171 tcg_temp_free_i32(fp0
);
9176 TCGv_i32 fp0
= tcg_temp_new_i32();
9178 gen_load_fpr32(ctx
, fp0
, fs
);
9180 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9182 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9184 gen_store_fpr32(ctx
, fp0
, fd
);
9185 tcg_temp_free_i32(fp0
);
9190 TCGv_i32 fp0
= tcg_temp_new_i32();
9192 gen_load_fpr32(ctx
, fp0
, fs
);
9194 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9196 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9198 gen_store_fpr32(ctx
, fp0
, fd
);
9199 tcg_temp_free_i32(fp0
);
9203 check_insn(ctx
, ISA_MIPS32R6
);
9204 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9207 check_insn(ctx
, ISA_MIPS32R6
);
9208 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9211 check_insn(ctx
, ISA_MIPS32R6
);
9212 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9216 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9221 TCGLabel
*l1
= gen_new_label();
9225 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9227 fp0
= tcg_temp_new_i32();
9228 gen_load_fpr32(ctx
, fp0
, fs
);
9229 gen_store_fpr32(ctx
, fp0
, fd
);
9230 tcg_temp_free_i32(fp0
);
9235 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9237 TCGLabel
*l1
= gen_new_label();
9241 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9242 fp0
= tcg_temp_new_i32();
9243 gen_load_fpr32(ctx
, fp0
, fs
);
9244 gen_store_fpr32(ctx
, fp0
, fd
);
9245 tcg_temp_free_i32(fp0
);
9252 TCGv_i32 fp0
= tcg_temp_new_i32();
9254 gen_load_fpr32(ctx
, fp0
, fs
);
9255 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9256 gen_store_fpr32(ctx
, fp0
, fd
);
9257 tcg_temp_free_i32(fp0
);
9262 TCGv_i32 fp0
= tcg_temp_new_i32();
9264 gen_load_fpr32(ctx
, fp0
, fs
);
9265 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9266 gen_store_fpr32(ctx
, fp0
, fd
);
9267 tcg_temp_free_i32(fp0
);
9271 check_insn(ctx
, ISA_MIPS32R6
);
9273 TCGv_i32 fp0
= tcg_temp_new_i32();
9274 TCGv_i32 fp1
= tcg_temp_new_i32();
9275 TCGv_i32 fp2
= tcg_temp_new_i32();
9276 gen_load_fpr32(ctx
, fp0
, fs
);
9277 gen_load_fpr32(ctx
, fp1
, ft
);
9278 gen_load_fpr32(ctx
, fp2
, fd
);
9279 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9280 gen_store_fpr32(ctx
, fp2
, fd
);
9281 tcg_temp_free_i32(fp2
);
9282 tcg_temp_free_i32(fp1
);
9283 tcg_temp_free_i32(fp0
);
9287 check_insn(ctx
, ISA_MIPS32R6
);
9289 TCGv_i32 fp0
= tcg_temp_new_i32();
9290 TCGv_i32 fp1
= tcg_temp_new_i32();
9291 TCGv_i32 fp2
= tcg_temp_new_i32();
9292 gen_load_fpr32(ctx
, fp0
, fs
);
9293 gen_load_fpr32(ctx
, fp1
, ft
);
9294 gen_load_fpr32(ctx
, fp2
, fd
);
9295 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9296 gen_store_fpr32(ctx
, fp2
, fd
);
9297 tcg_temp_free_i32(fp2
);
9298 tcg_temp_free_i32(fp1
);
9299 tcg_temp_free_i32(fp0
);
9303 check_insn(ctx
, ISA_MIPS32R6
);
9305 TCGv_i32 fp0
= tcg_temp_new_i32();
9306 gen_load_fpr32(ctx
, fp0
, fs
);
9307 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9308 gen_store_fpr32(ctx
, fp0
, fd
);
9309 tcg_temp_free_i32(fp0
);
9313 check_insn(ctx
, ISA_MIPS32R6
);
9315 TCGv_i32 fp0
= tcg_temp_new_i32();
9316 gen_load_fpr32(ctx
, fp0
, fs
);
9317 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9318 gen_store_fpr32(ctx
, fp0
, fd
);
9319 tcg_temp_free_i32(fp0
);
9322 case OPC_MIN_S
: /* OPC_RECIP2_S */
9323 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9325 TCGv_i32 fp0
= tcg_temp_new_i32();
9326 TCGv_i32 fp1
= tcg_temp_new_i32();
9327 TCGv_i32 fp2
= tcg_temp_new_i32();
9328 gen_load_fpr32(ctx
, fp0
, fs
);
9329 gen_load_fpr32(ctx
, fp1
, ft
);
9330 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9331 gen_store_fpr32(ctx
, fp2
, fd
);
9332 tcg_temp_free_i32(fp2
);
9333 tcg_temp_free_i32(fp1
);
9334 tcg_temp_free_i32(fp0
);
9337 check_cp1_64bitmode(ctx
);
9339 TCGv_i32 fp0
= tcg_temp_new_i32();
9340 TCGv_i32 fp1
= tcg_temp_new_i32();
9342 gen_load_fpr32(ctx
, fp0
, fs
);
9343 gen_load_fpr32(ctx
, fp1
, ft
);
9344 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9345 tcg_temp_free_i32(fp1
);
9346 gen_store_fpr32(ctx
, fp0
, fd
);
9347 tcg_temp_free_i32(fp0
);
9351 case OPC_MINA_S
: /* OPC_RECIP1_S */
9352 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9354 TCGv_i32 fp0
= tcg_temp_new_i32();
9355 TCGv_i32 fp1
= tcg_temp_new_i32();
9356 TCGv_i32 fp2
= tcg_temp_new_i32();
9357 gen_load_fpr32(ctx
, fp0
, fs
);
9358 gen_load_fpr32(ctx
, fp1
, ft
);
9359 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9360 gen_store_fpr32(ctx
, fp2
, fd
);
9361 tcg_temp_free_i32(fp2
);
9362 tcg_temp_free_i32(fp1
);
9363 tcg_temp_free_i32(fp0
);
9366 check_cp1_64bitmode(ctx
);
9368 TCGv_i32 fp0
= tcg_temp_new_i32();
9370 gen_load_fpr32(ctx
, fp0
, fs
);
9371 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9372 gen_store_fpr32(ctx
, fp0
, fd
);
9373 tcg_temp_free_i32(fp0
);
9377 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9378 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9380 TCGv_i32 fp0
= tcg_temp_new_i32();
9381 TCGv_i32 fp1
= tcg_temp_new_i32();
9382 gen_load_fpr32(ctx
, fp0
, fs
);
9383 gen_load_fpr32(ctx
, fp1
, ft
);
9384 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9385 gen_store_fpr32(ctx
, fp1
, fd
);
9386 tcg_temp_free_i32(fp1
);
9387 tcg_temp_free_i32(fp0
);
9390 check_cp1_64bitmode(ctx
);
9392 TCGv_i32 fp0
= tcg_temp_new_i32();
9394 gen_load_fpr32(ctx
, fp0
, fs
);
9395 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9396 gen_store_fpr32(ctx
, fp0
, fd
);
9397 tcg_temp_free_i32(fp0
);
9401 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9402 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9404 TCGv_i32 fp0
= tcg_temp_new_i32();
9405 TCGv_i32 fp1
= tcg_temp_new_i32();
9406 gen_load_fpr32(ctx
, fp0
, fs
);
9407 gen_load_fpr32(ctx
, fp1
, ft
);
9408 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9409 gen_store_fpr32(ctx
, fp1
, fd
);
9410 tcg_temp_free_i32(fp1
);
9411 tcg_temp_free_i32(fp0
);
9414 check_cp1_64bitmode(ctx
);
9416 TCGv_i32 fp0
= tcg_temp_new_i32();
9417 TCGv_i32 fp1
= tcg_temp_new_i32();
9419 gen_load_fpr32(ctx
, fp0
, fs
);
9420 gen_load_fpr32(ctx
, fp1
, ft
);
9421 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9422 tcg_temp_free_i32(fp1
);
9423 gen_store_fpr32(ctx
, fp0
, fd
);
9424 tcg_temp_free_i32(fp0
);
9429 check_cp1_registers(ctx
, fd
);
9431 TCGv_i32 fp32
= tcg_temp_new_i32();
9432 TCGv_i64 fp64
= tcg_temp_new_i64();
9434 gen_load_fpr32(ctx
, fp32
, fs
);
9435 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9436 tcg_temp_free_i32(fp32
);
9437 gen_store_fpr64(ctx
, fp64
, fd
);
9438 tcg_temp_free_i64(fp64
);
9443 TCGv_i32 fp0
= tcg_temp_new_i32();
9445 gen_load_fpr32(ctx
, fp0
, fs
);
9447 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9449 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9451 gen_store_fpr32(ctx
, fp0
, fd
);
9452 tcg_temp_free_i32(fp0
);
9456 check_cp1_64bitmode(ctx
);
9458 TCGv_i32 fp32
= tcg_temp_new_i32();
9459 TCGv_i64 fp64
= tcg_temp_new_i64();
9461 gen_load_fpr32(ctx
, fp32
, fs
);
9463 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9465 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9467 tcg_temp_free_i32(fp32
);
9468 gen_store_fpr64(ctx
, fp64
, fd
);
9469 tcg_temp_free_i64(fp64
);
9475 TCGv_i64 fp64
= tcg_temp_new_i64();
9476 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9477 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9479 gen_load_fpr32(ctx
, fp32_0
, fs
);
9480 gen_load_fpr32(ctx
, fp32_1
, ft
);
9481 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9482 tcg_temp_free_i32(fp32_1
);
9483 tcg_temp_free_i32(fp32_0
);
9484 gen_store_fpr64(ctx
, fp64
, fd
);
9485 tcg_temp_free_i64(fp64
);
9497 case OPC_CMP_NGLE_S
:
9504 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9505 if (ctx
->opcode
& (1 << 6)) {
9506 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9508 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9512 check_cp1_registers(ctx
, fs
| ft
| fd
);
9514 TCGv_i64 fp0
= tcg_temp_new_i64();
9515 TCGv_i64 fp1
= tcg_temp_new_i64();
9517 gen_load_fpr64(ctx
, fp0
, fs
);
9518 gen_load_fpr64(ctx
, fp1
, ft
);
9519 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9520 tcg_temp_free_i64(fp1
);
9521 gen_store_fpr64(ctx
, fp0
, fd
);
9522 tcg_temp_free_i64(fp0
);
9526 check_cp1_registers(ctx
, fs
| ft
| fd
);
9528 TCGv_i64 fp0
= tcg_temp_new_i64();
9529 TCGv_i64 fp1
= tcg_temp_new_i64();
9531 gen_load_fpr64(ctx
, fp0
, fs
);
9532 gen_load_fpr64(ctx
, fp1
, ft
);
9533 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9534 tcg_temp_free_i64(fp1
);
9535 gen_store_fpr64(ctx
, fp0
, fd
);
9536 tcg_temp_free_i64(fp0
);
9540 check_cp1_registers(ctx
, fs
| ft
| fd
);
9542 TCGv_i64 fp0
= tcg_temp_new_i64();
9543 TCGv_i64 fp1
= tcg_temp_new_i64();
9545 gen_load_fpr64(ctx
, fp0
, fs
);
9546 gen_load_fpr64(ctx
, fp1
, ft
);
9547 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9548 tcg_temp_free_i64(fp1
);
9549 gen_store_fpr64(ctx
, fp0
, fd
);
9550 tcg_temp_free_i64(fp0
);
9554 check_cp1_registers(ctx
, fs
| ft
| fd
);
9556 TCGv_i64 fp0
= tcg_temp_new_i64();
9557 TCGv_i64 fp1
= tcg_temp_new_i64();
9559 gen_load_fpr64(ctx
, fp0
, fs
);
9560 gen_load_fpr64(ctx
, fp1
, ft
);
9561 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9562 tcg_temp_free_i64(fp1
);
9563 gen_store_fpr64(ctx
, fp0
, fd
);
9564 tcg_temp_free_i64(fp0
);
9568 check_cp1_registers(ctx
, fs
| fd
);
9570 TCGv_i64 fp0
= tcg_temp_new_i64();
9572 gen_load_fpr64(ctx
, fp0
, fs
);
9573 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9574 gen_store_fpr64(ctx
, fp0
, fd
);
9575 tcg_temp_free_i64(fp0
);
9579 check_cp1_registers(ctx
, fs
| fd
);
9581 TCGv_i64 fp0
= tcg_temp_new_i64();
9583 gen_load_fpr64(ctx
, fp0
, fs
);
9585 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9587 gen_helper_float_abs_d(fp0
, fp0
);
9589 gen_store_fpr64(ctx
, fp0
, fd
);
9590 tcg_temp_free_i64(fp0
);
9594 check_cp1_registers(ctx
, fs
| fd
);
9596 TCGv_i64 fp0
= tcg_temp_new_i64();
9598 gen_load_fpr64(ctx
, fp0
, fs
);
9599 gen_store_fpr64(ctx
, fp0
, fd
);
9600 tcg_temp_free_i64(fp0
);
9604 check_cp1_registers(ctx
, fs
| fd
);
9606 TCGv_i64 fp0
= tcg_temp_new_i64();
9608 gen_load_fpr64(ctx
, fp0
, fs
);
9610 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9612 gen_helper_float_chs_d(fp0
, fp0
);
9614 gen_store_fpr64(ctx
, fp0
, fd
);
9615 tcg_temp_free_i64(fp0
);
9619 check_cp1_64bitmode(ctx
);
9621 TCGv_i64 fp0
= tcg_temp_new_i64();
9623 gen_load_fpr64(ctx
, fp0
, fs
);
9625 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9627 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9629 gen_store_fpr64(ctx
, fp0
, fd
);
9630 tcg_temp_free_i64(fp0
);
9634 check_cp1_64bitmode(ctx
);
9636 TCGv_i64 fp0
= tcg_temp_new_i64();
9638 gen_load_fpr64(ctx
, fp0
, fs
);
9640 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9642 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9644 gen_store_fpr64(ctx
, fp0
, fd
);
9645 tcg_temp_free_i64(fp0
);
9649 check_cp1_64bitmode(ctx
);
9651 TCGv_i64 fp0
= tcg_temp_new_i64();
9653 gen_load_fpr64(ctx
, fp0
, fs
);
9655 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9657 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9659 gen_store_fpr64(ctx
, fp0
, fd
);
9660 tcg_temp_free_i64(fp0
);
9664 check_cp1_64bitmode(ctx
);
9666 TCGv_i64 fp0
= tcg_temp_new_i64();
9668 gen_load_fpr64(ctx
, fp0
, fs
);
9670 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9672 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9674 gen_store_fpr64(ctx
, fp0
, fd
);
9675 tcg_temp_free_i64(fp0
);
9679 check_cp1_registers(ctx
, fs
);
9681 TCGv_i32 fp32
= tcg_temp_new_i32();
9682 TCGv_i64 fp64
= tcg_temp_new_i64();
9684 gen_load_fpr64(ctx
, fp64
, fs
);
9686 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9688 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9690 tcg_temp_free_i64(fp64
);
9691 gen_store_fpr32(ctx
, fp32
, fd
);
9692 tcg_temp_free_i32(fp32
);
9696 check_cp1_registers(ctx
, fs
);
9698 TCGv_i32 fp32
= tcg_temp_new_i32();
9699 TCGv_i64 fp64
= tcg_temp_new_i64();
9701 gen_load_fpr64(ctx
, fp64
, fs
);
9703 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9705 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9707 tcg_temp_free_i64(fp64
);
9708 gen_store_fpr32(ctx
, fp32
, fd
);
9709 tcg_temp_free_i32(fp32
);
9713 check_cp1_registers(ctx
, fs
);
9715 TCGv_i32 fp32
= tcg_temp_new_i32();
9716 TCGv_i64 fp64
= tcg_temp_new_i64();
9718 gen_load_fpr64(ctx
, fp64
, fs
);
9720 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9722 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9724 tcg_temp_free_i64(fp64
);
9725 gen_store_fpr32(ctx
, fp32
, fd
);
9726 tcg_temp_free_i32(fp32
);
9730 check_cp1_registers(ctx
, fs
);
9732 TCGv_i32 fp32
= tcg_temp_new_i32();
9733 TCGv_i64 fp64
= tcg_temp_new_i64();
9735 gen_load_fpr64(ctx
, fp64
, fs
);
9737 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9739 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9741 tcg_temp_free_i64(fp64
);
9742 gen_store_fpr32(ctx
, fp32
, fd
);
9743 tcg_temp_free_i32(fp32
);
9747 check_insn(ctx
, ISA_MIPS32R6
);
9748 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9751 check_insn(ctx
, ISA_MIPS32R6
);
9752 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9755 check_insn(ctx
, ISA_MIPS32R6
);
9756 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9759 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9760 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9763 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9765 TCGLabel
*l1
= gen_new_label();
9769 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9771 fp0
= tcg_temp_new_i64();
9772 gen_load_fpr64(ctx
, fp0
, fs
);
9773 gen_store_fpr64(ctx
, fp0
, fd
);
9774 tcg_temp_free_i64(fp0
);
9779 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9781 TCGLabel
*l1
= gen_new_label();
9785 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9786 fp0
= tcg_temp_new_i64();
9787 gen_load_fpr64(ctx
, fp0
, fs
);
9788 gen_store_fpr64(ctx
, fp0
, fd
);
9789 tcg_temp_free_i64(fp0
);
9795 check_cp1_registers(ctx
, fs
| fd
);
9797 TCGv_i64 fp0
= tcg_temp_new_i64();
9799 gen_load_fpr64(ctx
, fp0
, fs
);
9800 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9801 gen_store_fpr64(ctx
, fp0
, fd
);
9802 tcg_temp_free_i64(fp0
);
9806 check_cp1_registers(ctx
, fs
| fd
);
9808 TCGv_i64 fp0
= tcg_temp_new_i64();
9810 gen_load_fpr64(ctx
, fp0
, fs
);
9811 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9812 gen_store_fpr64(ctx
, fp0
, fd
);
9813 tcg_temp_free_i64(fp0
);
9817 check_insn(ctx
, ISA_MIPS32R6
);
9819 TCGv_i64 fp0
= tcg_temp_new_i64();
9820 TCGv_i64 fp1
= tcg_temp_new_i64();
9821 TCGv_i64 fp2
= tcg_temp_new_i64();
9822 gen_load_fpr64(ctx
, fp0
, fs
);
9823 gen_load_fpr64(ctx
, fp1
, ft
);
9824 gen_load_fpr64(ctx
, fp2
, fd
);
9825 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9826 gen_store_fpr64(ctx
, fp2
, fd
);
9827 tcg_temp_free_i64(fp2
);
9828 tcg_temp_free_i64(fp1
);
9829 tcg_temp_free_i64(fp0
);
9833 check_insn(ctx
, ISA_MIPS32R6
);
9835 TCGv_i64 fp0
= tcg_temp_new_i64();
9836 TCGv_i64 fp1
= tcg_temp_new_i64();
9837 TCGv_i64 fp2
= tcg_temp_new_i64();
9838 gen_load_fpr64(ctx
, fp0
, fs
);
9839 gen_load_fpr64(ctx
, fp1
, ft
);
9840 gen_load_fpr64(ctx
, fp2
, fd
);
9841 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9842 gen_store_fpr64(ctx
, fp2
, fd
);
9843 tcg_temp_free_i64(fp2
);
9844 tcg_temp_free_i64(fp1
);
9845 tcg_temp_free_i64(fp0
);
9849 check_insn(ctx
, ISA_MIPS32R6
);
9851 TCGv_i64 fp0
= tcg_temp_new_i64();
9852 gen_load_fpr64(ctx
, fp0
, fs
);
9853 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9854 gen_store_fpr64(ctx
, fp0
, fd
);
9855 tcg_temp_free_i64(fp0
);
9859 check_insn(ctx
, ISA_MIPS32R6
);
9861 TCGv_i64 fp0
= tcg_temp_new_i64();
9862 gen_load_fpr64(ctx
, fp0
, fs
);
9863 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9864 gen_store_fpr64(ctx
, fp0
, fd
);
9865 tcg_temp_free_i64(fp0
);
9868 case OPC_MIN_D
: /* OPC_RECIP2_D */
9869 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9871 TCGv_i64 fp0
= tcg_temp_new_i64();
9872 TCGv_i64 fp1
= tcg_temp_new_i64();
9873 gen_load_fpr64(ctx
, fp0
, fs
);
9874 gen_load_fpr64(ctx
, fp1
, ft
);
9875 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9876 gen_store_fpr64(ctx
, fp1
, fd
);
9877 tcg_temp_free_i64(fp1
);
9878 tcg_temp_free_i64(fp0
);
9881 check_cp1_64bitmode(ctx
);
9883 TCGv_i64 fp0
= tcg_temp_new_i64();
9884 TCGv_i64 fp1
= tcg_temp_new_i64();
9886 gen_load_fpr64(ctx
, fp0
, fs
);
9887 gen_load_fpr64(ctx
, fp1
, ft
);
9888 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9889 tcg_temp_free_i64(fp1
);
9890 gen_store_fpr64(ctx
, fp0
, fd
);
9891 tcg_temp_free_i64(fp0
);
9895 case OPC_MINA_D
: /* OPC_RECIP1_D */
9896 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9898 TCGv_i64 fp0
= tcg_temp_new_i64();
9899 TCGv_i64 fp1
= tcg_temp_new_i64();
9900 gen_load_fpr64(ctx
, fp0
, fs
);
9901 gen_load_fpr64(ctx
, fp1
, ft
);
9902 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9903 gen_store_fpr64(ctx
, fp1
, fd
);
9904 tcg_temp_free_i64(fp1
);
9905 tcg_temp_free_i64(fp0
);
9908 check_cp1_64bitmode(ctx
);
9910 TCGv_i64 fp0
= tcg_temp_new_i64();
9912 gen_load_fpr64(ctx
, fp0
, fs
);
9913 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9914 gen_store_fpr64(ctx
, fp0
, fd
);
9915 tcg_temp_free_i64(fp0
);
9919 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9920 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9922 TCGv_i64 fp0
= tcg_temp_new_i64();
9923 TCGv_i64 fp1
= tcg_temp_new_i64();
9924 gen_load_fpr64(ctx
, fp0
, fs
);
9925 gen_load_fpr64(ctx
, fp1
, ft
);
9926 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9927 gen_store_fpr64(ctx
, fp1
, fd
);
9928 tcg_temp_free_i64(fp1
);
9929 tcg_temp_free_i64(fp0
);
9932 check_cp1_64bitmode(ctx
);
9934 TCGv_i64 fp0
= tcg_temp_new_i64();
9936 gen_load_fpr64(ctx
, fp0
, fs
);
9937 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9938 gen_store_fpr64(ctx
, fp0
, fd
);
9939 tcg_temp_free_i64(fp0
);
9943 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9944 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9946 TCGv_i64 fp0
= tcg_temp_new_i64();
9947 TCGv_i64 fp1
= tcg_temp_new_i64();
9948 gen_load_fpr64(ctx
, fp0
, fs
);
9949 gen_load_fpr64(ctx
, fp1
, ft
);
9950 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9951 gen_store_fpr64(ctx
, fp1
, fd
);
9952 tcg_temp_free_i64(fp1
);
9953 tcg_temp_free_i64(fp0
);
9956 check_cp1_64bitmode(ctx
);
9958 TCGv_i64 fp0
= tcg_temp_new_i64();
9959 TCGv_i64 fp1
= tcg_temp_new_i64();
9961 gen_load_fpr64(ctx
, fp0
, fs
);
9962 gen_load_fpr64(ctx
, fp1
, ft
);
9963 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9964 tcg_temp_free_i64(fp1
);
9965 gen_store_fpr64(ctx
, fp0
, fd
);
9966 tcg_temp_free_i64(fp0
);
9979 case OPC_CMP_NGLE_D
:
9986 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9987 if (ctx
->opcode
& (1 << 6)) {
9988 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9990 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9994 check_cp1_registers(ctx
, fs
);
9996 TCGv_i32 fp32
= tcg_temp_new_i32();
9997 TCGv_i64 fp64
= tcg_temp_new_i64();
9999 gen_load_fpr64(ctx
, fp64
, fs
);
10000 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10001 tcg_temp_free_i64(fp64
);
10002 gen_store_fpr32(ctx
, fp32
, fd
);
10003 tcg_temp_free_i32(fp32
);
10007 check_cp1_registers(ctx
, fs
);
10009 TCGv_i32 fp32
= tcg_temp_new_i32();
10010 TCGv_i64 fp64
= tcg_temp_new_i64();
10012 gen_load_fpr64(ctx
, fp64
, fs
);
10013 if (ctx
->nan2008
) {
10014 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10016 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10018 tcg_temp_free_i64(fp64
);
10019 gen_store_fpr32(ctx
, fp32
, fd
);
10020 tcg_temp_free_i32(fp32
);
10024 check_cp1_64bitmode(ctx
);
10026 TCGv_i64 fp0
= tcg_temp_new_i64();
10028 gen_load_fpr64(ctx
, fp0
, fs
);
10029 if (ctx
->nan2008
) {
10030 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10032 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10034 gen_store_fpr64(ctx
, fp0
, fd
);
10035 tcg_temp_free_i64(fp0
);
10040 TCGv_i32 fp0
= tcg_temp_new_i32();
10042 gen_load_fpr32(ctx
, fp0
, fs
);
10043 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10044 gen_store_fpr32(ctx
, fp0
, fd
);
10045 tcg_temp_free_i32(fp0
);
10049 check_cp1_registers(ctx
, fd
);
10051 TCGv_i32 fp32
= tcg_temp_new_i32();
10052 TCGv_i64 fp64
= tcg_temp_new_i64();
10054 gen_load_fpr32(ctx
, fp32
, fs
);
10055 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10056 tcg_temp_free_i32(fp32
);
10057 gen_store_fpr64(ctx
, fp64
, fd
);
10058 tcg_temp_free_i64(fp64
);
10062 check_cp1_64bitmode(ctx
);
10064 TCGv_i32 fp32
= tcg_temp_new_i32();
10065 TCGv_i64 fp64
= tcg_temp_new_i64();
10067 gen_load_fpr64(ctx
, fp64
, fs
);
10068 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10069 tcg_temp_free_i64(fp64
);
10070 gen_store_fpr32(ctx
, fp32
, fd
);
10071 tcg_temp_free_i32(fp32
);
10075 check_cp1_64bitmode(ctx
);
10077 TCGv_i64 fp0
= tcg_temp_new_i64();
10079 gen_load_fpr64(ctx
, fp0
, fs
);
10080 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10081 gen_store_fpr64(ctx
, fp0
, fd
);
10082 tcg_temp_free_i64(fp0
);
10085 case OPC_CVT_PS_PW
:
10088 TCGv_i64 fp0
= tcg_temp_new_i64();
10090 gen_load_fpr64(ctx
, fp0
, fs
);
10091 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10092 gen_store_fpr64(ctx
, fp0
, fd
);
10093 tcg_temp_free_i64(fp0
);
10099 TCGv_i64 fp0
= tcg_temp_new_i64();
10100 TCGv_i64 fp1
= tcg_temp_new_i64();
10102 gen_load_fpr64(ctx
, fp0
, fs
);
10103 gen_load_fpr64(ctx
, fp1
, ft
);
10104 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10105 tcg_temp_free_i64(fp1
);
10106 gen_store_fpr64(ctx
, fp0
, fd
);
10107 tcg_temp_free_i64(fp0
);
10113 TCGv_i64 fp0
= tcg_temp_new_i64();
10114 TCGv_i64 fp1
= tcg_temp_new_i64();
10116 gen_load_fpr64(ctx
, fp0
, fs
);
10117 gen_load_fpr64(ctx
, fp1
, ft
);
10118 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10119 tcg_temp_free_i64(fp1
);
10120 gen_store_fpr64(ctx
, fp0
, fd
);
10121 tcg_temp_free_i64(fp0
);
10127 TCGv_i64 fp0
= tcg_temp_new_i64();
10128 TCGv_i64 fp1
= tcg_temp_new_i64();
10130 gen_load_fpr64(ctx
, fp0
, fs
);
10131 gen_load_fpr64(ctx
, fp1
, ft
);
10132 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_abs_ps(fp0, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_chs_ps(fp0, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        gen_movcf_ps(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);

        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
        fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
        fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, ft);
        gen_load_fpr64(ctx, fp1, fs);
        gen_helper_float_addr_ps(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, ft);
        gen_load_fpr64(ctx, fp1, fs);
        gen_helper_float_mulr_ps(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

    case OPC_RECIP2_PS:

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_recip2_ps(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

    case OPC_RECIP1_PS:

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_recip1_ps(fp0, cpu_env, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

    case OPC_RSQRT1_PS:

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_rsqrt1_ps(fp0, cpu_env, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

    case OPC_RSQRT2_PS:

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_rsqrt2_ps(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);

        TCGv_i32 fp0 = tcg_temp_new_i32();

        gen_load_fpr32h(ctx, fp0, fs);
        gen_helper_float_cvts_pu(fp0, cpu_env, fp0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);

    case OPC_CVT_PW_PS:

        TCGv_i64 fp0 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_cvtpw_ps(fp0, cpu_env, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);

        TCGv_i32 fp0 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_helper_float_cvts_pl(fp0, cpu_env, fp0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_store_fpr32h(ctx, fp0, fd);
        gen_store_fpr32(ctx, fp1, fd);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32h(ctx, fp1, ft);
        gen_store_fpr32(ctx, fp1, fd);
        gen_store_fpr32h(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();

        gen_load_fpr32h(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_store_fpr32(ctx, fp1, fd);
        gen_store_fpr32h(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();

        gen_load_fpr32h(ctx, fp0, fs);
        gen_load_fpr32h(ctx, fp1, ft);
        gen_store_fpr32(ctx, fp1, fd);
        gen_store_fpr32h(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);

    case OPC_CMP_UN_PS:
    case OPC_CMP_EQ_PS:
    case OPC_CMP_UEQ_PS:
    case OPC_CMP_OLT_PS:
    case OPC_CMP_ULT_PS:
    case OPC_CMP_OLE_PS:
    case OPC_CMP_ULE_PS:
    case OPC_CMP_SF_PS:
    case OPC_CMP_NGLE_PS:
    case OPC_CMP_SEQ_PS:
    case OPC_CMP_NGL_PS:
    case OPC_CMP_LT_PS:
    case OPC_CMP_NGE_PS:
    case OPC_CMP_LE_PS:
    case OPC_CMP_NGT_PS:
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_ps(ctx, func-48, ft, fs, cc);
        } else {
            gen_cmp_ps(ctx, func-48, ft, fs, cc);
        }

        MIPS_INVAL("farith");
        generate_exception_end(ctx, EXCP_RI);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
{
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

        TCGv_i32 fp0 = tcg_temp_new_i32();

        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);

        check_cp1_registers(ctx, fd);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);

        TCGv_i64 fp0 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
        tcg_temp_free_i32(fp0);

        check_cp1_registers(ctx, fs);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);

        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);
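
/*
 * Rough illustration of the indexed FP load/store pattern above: the
 * effective address is formed from GPR[base] and GPR[index] (whichever is
 * nonzero, or their sum), and the LUXC1/SUXC1-style paths force 8-byte
 * alignment by masking the low bits with tcg_gen_andi_tl(t0, t0, ~0x7)
 * before the 64-bit access.
 */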
10482 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10483 int fd
, int fr
, int fs
, int ft
)
10489 TCGv t0
= tcg_temp_local_new();
10490 TCGv_i32 fp
= tcg_temp_new_i32();
10491 TCGv_i32 fph
= tcg_temp_new_i32();
10492 TCGLabel
*l1
= gen_new_label();
10493 TCGLabel
*l2
= gen_new_label();
10495 gen_load_gpr(t0
, fr
);
10496 tcg_gen_andi_tl(t0
, t0
, 0x7);
10498 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10499 gen_load_fpr32(ctx
, fp
, fs
);
10500 gen_load_fpr32h(ctx
, fph
, fs
);
10501 gen_store_fpr32(ctx
, fp
, fd
);
10502 gen_store_fpr32h(ctx
, fph
, fd
);
10505 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10507 #ifdef TARGET_WORDS_BIGENDIAN
10508 gen_load_fpr32(ctx
, fp
, fs
);
10509 gen_load_fpr32h(ctx
, fph
, ft
);
10510 gen_store_fpr32h(ctx
, fp
, fd
);
10511 gen_store_fpr32(ctx
, fph
, fd
);
10513 gen_load_fpr32h(ctx
, fph
, fs
);
10514 gen_load_fpr32(ctx
, fp
, ft
);
10515 gen_store_fpr32(ctx
, fph
, fd
);
10516 gen_store_fpr32h(ctx
, fp
, fd
);
10519 tcg_temp_free_i32(fp
);
10520 tcg_temp_free_i32(fph
);
10526 TCGv_i32 fp0
= tcg_temp_new_i32();
10527 TCGv_i32 fp1
= tcg_temp_new_i32();
10528 TCGv_i32 fp2
= tcg_temp_new_i32();
10530 gen_load_fpr32(ctx
, fp0
, fs
);
10531 gen_load_fpr32(ctx
, fp1
, ft
);
10532 gen_load_fpr32(ctx
, fp2
, fr
);
10533 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10534 tcg_temp_free_i32(fp0
);
10535 tcg_temp_free_i32(fp1
);
10536 gen_store_fpr32(ctx
, fp2
, fd
);
10537 tcg_temp_free_i32(fp2
);
10542 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10544 TCGv_i64 fp0
= tcg_temp_new_i64();
10545 TCGv_i64 fp1
= tcg_temp_new_i64();
10546 TCGv_i64 fp2
= tcg_temp_new_i64();
10548 gen_load_fpr64(ctx
, fp0
, fs
);
10549 gen_load_fpr64(ctx
, fp1
, ft
);
10550 gen_load_fpr64(ctx
, fp2
, fr
);
10551 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10552 tcg_temp_free_i64(fp0
);
10553 tcg_temp_free_i64(fp1
);
10554 gen_store_fpr64(ctx
, fp2
, fd
);
10555 tcg_temp_free_i64(fp2
);
10561 TCGv_i64 fp0
= tcg_temp_new_i64();
10562 TCGv_i64 fp1
= tcg_temp_new_i64();
10563 TCGv_i64 fp2
= tcg_temp_new_i64();
10565 gen_load_fpr64(ctx
, fp0
, fs
);
10566 gen_load_fpr64(ctx
, fp1
, ft
);
10567 gen_load_fpr64(ctx
, fp2
, fr
);
10568 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10569 tcg_temp_free_i64(fp0
);
10570 tcg_temp_free_i64(fp1
);
10571 gen_store_fpr64(ctx
, fp2
, fd
);
10572 tcg_temp_free_i64(fp2
);
10578 TCGv_i32 fp0
= tcg_temp_new_i32();
10579 TCGv_i32 fp1
= tcg_temp_new_i32();
10580 TCGv_i32 fp2
= tcg_temp_new_i32();
10582 gen_load_fpr32(ctx
, fp0
, fs
);
10583 gen_load_fpr32(ctx
, fp1
, ft
);
10584 gen_load_fpr32(ctx
, fp2
, fr
);
10585 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10586 tcg_temp_free_i32(fp0
);
10587 tcg_temp_free_i32(fp1
);
10588 gen_store_fpr32(ctx
, fp2
, fd
);
10589 tcg_temp_free_i32(fp2
);
10594 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10596 TCGv_i64 fp0
= tcg_temp_new_i64();
10597 TCGv_i64 fp1
= tcg_temp_new_i64();
10598 TCGv_i64 fp2
= tcg_temp_new_i64();
10600 gen_load_fpr64(ctx
, fp0
, fs
);
10601 gen_load_fpr64(ctx
, fp1
, ft
);
10602 gen_load_fpr64(ctx
, fp2
, fr
);
10603 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10604 tcg_temp_free_i64(fp0
);
10605 tcg_temp_free_i64(fp1
);
10606 gen_store_fpr64(ctx
, fp2
, fd
);
10607 tcg_temp_free_i64(fp2
);
10613 TCGv_i64 fp0
= tcg_temp_new_i64();
10614 TCGv_i64 fp1
= tcg_temp_new_i64();
10615 TCGv_i64 fp2
= tcg_temp_new_i64();
10617 gen_load_fpr64(ctx
, fp0
, fs
);
10618 gen_load_fpr64(ctx
, fp1
, ft
);
10619 gen_load_fpr64(ctx
, fp2
, fr
);
10620 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10621 tcg_temp_free_i64(fp0
);
10622 tcg_temp_free_i64(fp1
);
10623 gen_store_fpr64(ctx
, fp2
, fd
);
10624 tcg_temp_free_i64(fp2
);
10630 TCGv_i32 fp0
= tcg_temp_new_i32();
10631 TCGv_i32 fp1
= tcg_temp_new_i32();
10632 TCGv_i32 fp2
= tcg_temp_new_i32();
10634 gen_load_fpr32(ctx
, fp0
, fs
);
10635 gen_load_fpr32(ctx
, fp1
, ft
);
10636 gen_load_fpr32(ctx
, fp2
, fr
);
10637 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10638 tcg_temp_free_i32(fp0
);
10639 tcg_temp_free_i32(fp1
);
10640 gen_store_fpr32(ctx
, fp2
, fd
);
10641 tcg_temp_free_i32(fp2
);
10646 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10648 TCGv_i64 fp0
= tcg_temp_new_i64();
10649 TCGv_i64 fp1
= tcg_temp_new_i64();
10650 TCGv_i64 fp2
= tcg_temp_new_i64();
10652 gen_load_fpr64(ctx
, fp0
, fs
);
10653 gen_load_fpr64(ctx
, fp1
, ft
);
10654 gen_load_fpr64(ctx
, fp2
, fr
);
10655 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10656 tcg_temp_free_i64(fp0
);
10657 tcg_temp_free_i64(fp1
);
10658 gen_store_fpr64(ctx
, fp2
, fd
);
10659 tcg_temp_free_i64(fp2
);
10665 TCGv_i64 fp0
= tcg_temp_new_i64();
10666 TCGv_i64 fp1
= tcg_temp_new_i64();
10667 TCGv_i64 fp2
= tcg_temp_new_i64();
10669 gen_load_fpr64(ctx
, fp0
, fs
);
10670 gen_load_fpr64(ctx
, fp1
, ft
);
10671 gen_load_fpr64(ctx
, fp2
, fr
);
10672 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10673 tcg_temp_free_i64(fp0
);
10674 tcg_temp_free_i64(fp1
);
10675 gen_store_fpr64(ctx
, fp2
, fd
);
10676 tcg_temp_free_i64(fp2
);
10682 TCGv_i32 fp0
= tcg_temp_new_i32();
10683 TCGv_i32 fp1
= tcg_temp_new_i32();
10684 TCGv_i32 fp2
= tcg_temp_new_i32();
10686 gen_load_fpr32(ctx
, fp0
, fs
);
10687 gen_load_fpr32(ctx
, fp1
, ft
);
10688 gen_load_fpr32(ctx
, fp2
, fr
);
10689 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10690 tcg_temp_free_i32(fp0
);
10691 tcg_temp_free_i32(fp1
);
10692 gen_store_fpr32(ctx
, fp2
, fd
);
10693 tcg_temp_free_i32(fp2
);
10698 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10700 TCGv_i64 fp0
= tcg_temp_new_i64();
10701 TCGv_i64 fp1
= tcg_temp_new_i64();
10702 TCGv_i64 fp2
= tcg_temp_new_i64();
10704 gen_load_fpr64(ctx
, fp0
, fs
);
10705 gen_load_fpr64(ctx
, fp1
, ft
);
10706 gen_load_fpr64(ctx
, fp2
, fr
);
10707 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10708 tcg_temp_free_i64(fp0
);
10709 tcg_temp_free_i64(fp1
);
10710 gen_store_fpr64(ctx
, fp2
, fd
);
10711 tcg_temp_free_i64(fp2
);
10717 TCGv_i64 fp0
= tcg_temp_new_i64();
10718 TCGv_i64 fp1
= tcg_temp_new_i64();
10719 TCGv_i64 fp2
= tcg_temp_new_i64();
10721 gen_load_fpr64(ctx
, fp0
, fs
);
10722 gen_load_fpr64(ctx
, fp1
, ft
);
10723 gen_load_fpr64(ctx
, fp2
, fr
);
10724 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10725 tcg_temp_free_i64(fp0
);
10726 tcg_temp_free_i64(fp1
);
10727 gen_store_fpr64(ctx
, fp2
, fd
);
10728 tcg_temp_free_i64(fp2
);
10732 MIPS_INVAL("flt3_arith");
10733 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);

    t0 = tcg_temp_new();

        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);

        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);

        if (ctx->tb->cflags & CF_USE_ICOUNT) {
        gen_helper_rdhwr_cc(t0, cpu_env);
        if (ctx->tb->cflags & CF_USE_ICOUNT) {
        gen_store_gpr(t0, rt);
        /* Break the TB to be able to take timer interrupts immediately
           after reading count. BS_STOP isn't sufficient, we need to ensure
           we break completely out of translated code. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;

        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
        /* Performance counter registers are not implemented other than
         * control register 0.
         */
        generate_exception(ctx, EXCP_RI);
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);

#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);

        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
            generate_exception_end(ctx, EXCP_RI);

    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);

            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BC:
            /* Conditional branch */

                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);

                gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);

                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);

                tcg_gen_mov_tl(cpu_PC, btarget);

            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);

                tcg_gen_lookup_and_goto_ptr();

            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
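
/*
 * Worked example (for illustration): when jumping to a register on a core
 * with MIPS16/microMIPS, bit 0 of btarget selects the ISA mode.  A target
 * of 0x80001235 sets MIPS_HFLAG_M16 in hflags and loads 0x80001234 into
 * cpu_PC, while 0x80001234 clears the flag and is used unchanged.
 */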
10895 /* Compact Branches */
10896 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10897 int rs
, int rt
, int32_t offset
)
10899 int bcond_compute
= 0;
10900 TCGv t0
= tcg_temp_new();
10901 TCGv t1
= tcg_temp_new();
10902 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10904 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10905 #ifdef MIPS_DEBUG_DISAS
10906 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10909 generate_exception_end(ctx
, EXCP_RI
);
10913 /* Load needed operands and calculate btarget */
10915 /* compact branch */
10916 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10917 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10918 gen_load_gpr(t0
, rs
);
10919 gen_load_gpr(t1
, rt
);
10921 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10922 if (rs
<= rt
&& rs
== 0) {
10923 /* OPC_BEQZALC, OPC_BNEZALC */
10924 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10927 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10928 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10929 gen_load_gpr(t0
, rs
);
10930 gen_load_gpr(t1
, rt
);
10932 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10934 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10935 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10936 if (rs
== 0 || rs
== rt
) {
10937 /* OPC_BLEZALC, OPC_BGEZALC */
10938 /* OPC_BGTZALC, OPC_BLTZALC */
10939 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10941 gen_load_gpr(t0
, rs
);
10942 gen_load_gpr(t1
, rt
);
10944 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10948 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10953 /* OPC_BEQZC, OPC_BNEZC */
10954 gen_load_gpr(t0
, rs
);
10956 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10958 /* OPC_JIC, OPC_JIALC */
10959 TCGv tbase
= tcg_temp_new();
10960 TCGv toffset
= tcg_temp_new();
10962 gen_load_gpr(tbase
, rt
);
10963 tcg_gen_movi_tl(toffset
, offset
);
10964 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10965 tcg_temp_free(tbase
);
10966 tcg_temp_free(toffset
);
10970 MIPS_INVAL("Compact branch/jump");
10971 generate_exception_end(ctx
, EXCP_RI
);
10975 if (bcond_compute
== 0) {
10976 /* Uncoditional compact branch */
10979 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10982 ctx
->hflags
|= MIPS_HFLAG_BR
;
10985 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10988 ctx
->hflags
|= MIPS_HFLAG_B
;
10991 MIPS_INVAL("Compact branch/jump");
10992 generate_exception_end(ctx
, EXCP_RI
);
10996 /* Generating branch here as compact branches don't have delay slot */
10997 gen_branch(ctx
, 4);
10999 /* Conditional compact branch */
11000 TCGLabel
*fs
= gen_new_label();
11001 save_cpu_state(ctx
, 0);
11004 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11005 if (rs
== 0 && rt
!= 0) {
11007 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11008 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11010 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11013 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11016 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11017 if (rs
== 0 && rt
!= 0) {
11019 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11020 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11022 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11025 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11028 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11029 if (rs
== 0 && rt
!= 0) {
11031 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11032 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11034 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11037 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11040 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11041 if (rs
== 0 && rt
!= 0) {
11043 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11044 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11046 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11049 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11052 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11053 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11055 /* OPC_BOVC, OPC_BNVC */
11056 TCGv t2
= tcg_temp_new();
11057 TCGv t3
= tcg_temp_new();
11058 TCGv t4
= tcg_temp_new();
11059 TCGv input_overflow
= tcg_temp_new();
11061 gen_load_gpr(t0
, rs
);
11062 gen_load_gpr(t1
, rt
);
11063 tcg_gen_ext32s_tl(t2
, t0
);
11064 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11065 tcg_gen_ext32s_tl(t3
, t1
);
11066 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11067 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11069 tcg_gen_add_tl(t4
, t2
, t3
);
11070 tcg_gen_ext32s_tl(t4
, t4
);
11071 tcg_gen_xor_tl(t2
, t2
, t3
);
11072 tcg_gen_xor_tl(t3
, t4
, t3
);
11073 tcg_gen_andc_tl(t2
, t3
, t2
);
11074 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11075 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11076 if (opc
== OPC_BOVC
) {
11078 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11081 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11083 tcg_temp_free(input_overflow
);
11087 } else if (rs
< rt
&& rs
== 0) {
11088 /* OPC_BEQZALC, OPC_BNEZALC */
11089 if (opc
== OPC_BEQZALC
) {
11091 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11094 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11097 /* OPC_BEQC, OPC_BNEC */
11098 if (opc
== OPC_BEQC
) {
11100 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
11103 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
11108 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
11111 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
11114 MIPS_INVAL("Compact conditional branch/jump");
11115 generate_exception_end(ctx
, EXCP_RI
);
11119 /* Generating branch here as compact branches don't have delay slot */
11120 gen_goto_tb(ctx
, 1, ctx
->btarget
);
11123 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,

static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
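
/*
 * Worked example (for illustration): the MIPS16 3-bit register fields are
 * mapped through xlat() onto the full register file, so encodings 0..7
 * select $16, $17, $2, $3, $4, $5, $6 and $7; e.g. xlat(0) == 16 and
 * xlat(2) == 2.
 */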
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
                             int framesize)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

        generate_exception_end(ctx, EXCP_RI);

        gen_base_offset_addr(ctx, t0, 29, 12);
        gen_load_gpr(t1, 7);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 8);
        gen_load_gpr(t1, 6);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 4);
        gen_load_gpr(t1, 5);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 0);
        gen_load_gpr(t1, 4);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)

        DECR_AND_STORE(31);

        DECR_AND_STORE(30);
        DECR_AND_STORE(23);
        DECR_AND_STORE(22);
        DECR_AND_STORE(21);
        DECR_AND_STORE(20);
        DECR_AND_STORE(19);
        DECR_AND_STORE(18);

        DECR_AND_STORE(17);
        DECR_AND_STORE(16);

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);

static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
                                int framesize)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                            \
        tcg_gen_movi_tl(t2, -4);                           \
        gen_op_addr_add(ctx, t0, t0, t2);                  \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
        gen_store_gpr(t1, reg);                            \
    } while (0)

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
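
/*
 * Worked example (for illustration): DECR_AND_STORE(31) in gen_mips16_save()
 * expands to a pre-decremented word store of $ra:
 *
 *     tcg_gen_movi_tl(t2, -4);
 *     gen_op_addr_add(ctx, t0, t0, t2);
 *     gen_load_gpr(t1, 31);
 *     tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
 *
 * so the SAVE/RESTORE helpers walk the frame one 32-bit word at a time.
 */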
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);

        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);

static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
11546 #if defined(TARGET_MIPS64)
11547 static void decode_i64_mips16 (DisasContext
*ctx
,
11548 int ry
, int funct
, int16_t offset
,
11553 check_insn(ctx
, ISA_MIPS3
);
11554 check_mips_64(ctx
);
11555 offset
= extended
? offset
: offset
<< 3;
11556 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11559 check_insn(ctx
, ISA_MIPS3
);
11560 check_mips_64(ctx
);
11561 offset
= extended
? offset
: offset
<< 3;
11562 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11565 check_insn(ctx
, ISA_MIPS3
);
11566 check_mips_64(ctx
);
11567 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11568 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11571 check_insn(ctx
, ISA_MIPS3
);
11572 check_mips_64(ctx
);
11573 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11574 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11577 check_insn(ctx
, ISA_MIPS3
);
11578 check_mips_64(ctx
);
11579 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11580 generate_exception_end(ctx
, EXCP_RI
);
11582 offset
= extended
? offset
: offset
<< 3;
11583 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11587 check_insn(ctx
, ISA_MIPS3
);
11588 check_mips_64(ctx
);
11589 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11590 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11593 check_insn(ctx
, ISA_MIPS3
);
11594 check_mips_64(ctx
);
11595 offset
= extended
? offset
: offset
<< 2;
11596 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11599 check_insn(ctx
, ISA_MIPS3
);
11600 check_mips_64(ctx
);
11601 offset
= extended
? offset
: offset
<< 2;
11602 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11608 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11610 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11611 int op
, rx
, ry
, funct
, sa
;
11612 int16_t imm
, offset
;
11614 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11615 op
= (ctx
->opcode
>> 11) & 0x1f;
11616 sa
= (ctx
->opcode
>> 22) & 0x1f;
11617 funct
= (ctx
->opcode
>> 8) & 0x7;
11618 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11619 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11620 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11621 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11622 | (ctx
->opcode
& 0x1f));
11624 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11627 case M16_OPC_ADDIUSP
:
11628 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11630 case M16_OPC_ADDIUPC
:
11631 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11634 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11635 /* No delay slot, so just process as a normal instruction */
11638 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11639 /* No delay slot, so just process as a normal instruction */
11641 case M16_OPC_BNEQZ
:
11642 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11643 /* No delay slot, so just process as a normal instruction */
11645 case M16_OPC_SHIFT
:
11646 switch (ctx
->opcode
& 0x3) {
11648 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11651 #if defined(TARGET_MIPS64)
11652 check_mips_64(ctx
);
11653 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11655 generate_exception_end(ctx
, EXCP_RI
);
11659 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11662 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11666 #if defined(TARGET_MIPS64)
11668 check_insn(ctx
, ISA_MIPS3
);
11669 check_mips_64(ctx
);
11670 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11674 imm
= ctx
->opcode
& 0xf;
11675 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11676 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11677 imm
= (int16_t) (imm
<< 1) >> 1;
11678 if ((ctx
->opcode
>> 4) & 0x1) {
11679 #if defined(TARGET_MIPS64)
11680 check_mips_64(ctx
);
11681 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11683 generate_exception_end(ctx
, EXCP_RI
);
11686 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11689 case M16_OPC_ADDIU8
:
11690 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11693 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11695 case M16_OPC_SLTIU
:
11696 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11701 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11704 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11707 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11710 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11713 check_insn(ctx
, ISA_MIPS32
);
11715 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11716 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11717 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11718 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11719 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11720 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11721 | (ctx
->opcode
& 0xf)) << 3;
11723 if (ctx
->opcode
& (1 << 7)) {
11724 gen_mips16_save(ctx
, xsregs
, aregs
,
11725 do_ra
, do_s0
, do_s1
,
11728 gen_mips16_restore(ctx
, xsregs
, aregs
,
11729 do_ra
, do_s0
, do_s1
,
11735 generate_exception_end(ctx
, EXCP_RI
);
11740 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11743 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11745 #if defined(TARGET_MIPS64)
11747 check_insn(ctx
, ISA_MIPS3
);
11748 check_mips_64(ctx
);
11749 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11753 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11756 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11759 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11762 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11765 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11768 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11771 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11773 #if defined(TARGET_MIPS64)
11775 check_insn(ctx
, ISA_MIPS3
);
11776 check_mips_64(ctx
);
11777 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11781 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11784 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11787 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11790 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11792 #if defined(TARGET_MIPS64)
11794 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11798 generate_exception_end(ctx
, EXCP_RI
);
11805 static inline bool is_uhi(int sdbbp_code
)
11807 #ifdef CONFIG_USER_ONLY
11810 return semihosting_enabled() && sdbbp_code
== 1;
11814 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11818 int op
, cnvt_op
, op1
, offset
;
11822 op
= (ctx
->opcode
>> 11) & 0x1f;
11823 sa
= (ctx
->opcode
>> 2) & 0x7;
11824 sa
= sa
== 0 ? 8 : sa
;
11825 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11826 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11827 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11828 op1
= offset
= ctx
->opcode
& 0x1f;
11833 case M16_OPC_ADDIUSP
:
11835 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11837 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11840 case M16_OPC_ADDIUPC
:
11841 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11844 offset
= (ctx
->opcode
& 0x7ff) << 1;
11845 offset
= (int16_t)(offset
<< 4) >> 4;
11846 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11847 /* No delay slot, so just process as a normal instruction */
11850 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11851 offset
= (((ctx
->opcode
& 0x1f) << 21)
11852 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11854 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11855 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11859 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11860 ((int8_t)ctx
->opcode
) << 1, 0);
11861 /* No delay slot, so just process as a normal instruction */
11863 case M16_OPC_BNEQZ
:
11864 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11865 ((int8_t)ctx
->opcode
) << 1, 0);
11866 /* No delay slot, so just process as a normal instruction */
11868 case M16_OPC_SHIFT
:
11869 switch (ctx
->opcode
& 0x3) {
11871 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11874 #if defined(TARGET_MIPS64)
11875 check_insn(ctx
, ISA_MIPS3
);
11876 check_mips_64(ctx
);
11877 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11879 generate_exception_end(ctx
, EXCP_RI
);
11883 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11886 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11890 #if defined(TARGET_MIPS64)
11892 check_insn(ctx
, ISA_MIPS3
);
11893 check_mips_64(ctx
);
11894 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11899 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11901 if ((ctx
->opcode
>> 4) & 1) {
11902 #if defined(TARGET_MIPS64)
11903 check_insn(ctx
, ISA_MIPS3
);
11904 check_mips_64(ctx
);
11905 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11907 generate_exception_end(ctx
, EXCP_RI
);
11910 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11914 case M16_OPC_ADDIU8
:
11916 int16_t imm
= (int8_t) ctx
->opcode
;
11918 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11923 int16_t imm
= (uint8_t) ctx
->opcode
;
11924 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11927 case M16_OPC_SLTIU
:
11929 int16_t imm
= (uint8_t) ctx
->opcode
;
11930 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11937 funct
= (ctx
->opcode
>> 8) & 0x7;
11940 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11941 ((int8_t)ctx
->opcode
) << 1, 0);
11944 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11945 ((int8_t)ctx
->opcode
) << 1, 0);
11948 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11951 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11952 ((int8_t)ctx
->opcode
) << 3);
11955 check_insn(ctx
, ISA_MIPS32
);
11957 int do_ra
= ctx
->opcode
& (1 << 6);
11958 int do_s0
= ctx
->opcode
& (1 << 5);
11959 int do_s1
= ctx
->opcode
& (1 << 4);
11960 int framesize
= ctx
->opcode
& 0xf;
11962 if (framesize
== 0) {
11965 framesize
= framesize
<< 3;
11968 if (ctx
->opcode
& (1 << 7)) {
11969 gen_mips16_save(ctx
, 0, 0,
11970 do_ra
, do_s0
, do_s1
, framesize
);
11972 gen_mips16_restore(ctx
, 0, 0,
11973 do_ra
, do_s0
, do_s1
, framesize
);
11979 int rz
= xlat(ctx
->opcode
& 0x7);
11981 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11982 ((ctx
->opcode
>> 5) & 0x7);
11983 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11987 reg32
= ctx
->opcode
& 0x1f;
11988 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11991 generate_exception_end(ctx
, EXCP_RI
);
11998 int16_t imm
= (uint8_t) ctx
->opcode
;
12000 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12005 int16_t imm
= (uint8_t) ctx
->opcode
;
12006 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12009 #if defined(TARGET_MIPS64)
12011 check_insn(ctx
, ISA_MIPS3
);
12012 check_mips_64(ctx
);
12013 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12017 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12020 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12023 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12026 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12029 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12032 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12035 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12037 #if defined (TARGET_MIPS64)
12039 check_insn(ctx
, ISA_MIPS3
);
12040 check_mips_64(ctx
);
12041 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12045 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12048 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12051 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12054 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12058 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12061 switch (ctx
->opcode
& 0x3) {
12063 mips32_op
= OPC_ADDU
;
12066 mips32_op
= OPC_SUBU
;
12068 #if defined(TARGET_MIPS64)
12070 mips32_op
= OPC_DADDU
;
12071 check_insn(ctx
, ISA_MIPS3
);
12072 check_mips_64(ctx
);
12075 mips32_op
= OPC_DSUBU
;
12076 check_insn(ctx
, ISA_MIPS3
);
12077 check_mips_64(ctx
);
12081 generate_exception_end(ctx
, EXCP_RI
);
12085 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12094 int nd
= (ctx
->opcode
>> 7) & 0x1;
12095 int link
= (ctx
->opcode
>> 6) & 0x1;
12096 int ra
= (ctx
->opcode
>> 5) & 0x1;
12099 check_insn(ctx
, ISA_MIPS32
);
12108 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12113 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12114 gen_helper_do_semihosting(cpu_env
);
12116 /* XXX: not clear which exception should be raised
12117 * when in debug mode...
12119 check_insn(ctx
, ISA_MIPS32
);
12120 generate_exception_end(ctx
, EXCP_DBp
);
12124 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12127 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12130 generate_exception_end(ctx
, EXCP_BREAK
);
12133 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12136 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12139 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12141 #if defined (TARGET_MIPS64)
12143 check_insn(ctx
, ISA_MIPS3
);
12144 check_mips_64(ctx
);
12145 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12149 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12152 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12155 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12158 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12161 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12164 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12167 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12170 check_insn(ctx
, ISA_MIPS32
);
12172 case RR_RY_CNVT_ZEB
:
12173 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12175 case RR_RY_CNVT_ZEH
:
12176 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12178 case RR_RY_CNVT_SEB
:
12179 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12181 case RR_RY_CNVT_SEH
:
12182 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12184 #if defined (TARGET_MIPS64)
12185 case RR_RY_CNVT_ZEW
:
12186 check_insn(ctx
, ISA_MIPS64
);
12187 check_mips_64(ctx
);
12188 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12190 case RR_RY_CNVT_SEW
:
12191 check_insn(ctx
, ISA_MIPS64
);
12192 check_mips_64(ctx
);
12193 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12197 generate_exception_end(ctx
, EXCP_RI
);
12202 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12204 #if defined (TARGET_MIPS64)
12206 check_insn(ctx
, ISA_MIPS3
);
12207 check_mips_64(ctx
);
12208 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12211 check_insn(ctx
, ISA_MIPS3
);
12212 check_mips_64(ctx
);
12213 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12216 check_insn(ctx
, ISA_MIPS3
);
12217 check_mips_64(ctx
);
12218 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12221 check_insn(ctx
, ISA_MIPS3
);
12222 check_mips_64(ctx
);
12223 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12227 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12230 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12233 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12236 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12238 #if defined (TARGET_MIPS64)
12240 check_insn(ctx
, ISA_MIPS3
);
12241 check_mips_64(ctx
);
12242 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12245 check_insn(ctx
, ISA_MIPS3
);
12246 check_mips_64(ctx
);
12247 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12250 check_insn(ctx
, ISA_MIPS3
);
12251 check_mips_64(ctx
);
12252 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12255 check_insn(ctx
, ISA_MIPS3
);
12256 check_mips_64(ctx
);
12257 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12261 generate_exception_end(ctx
, EXCP_RI
);
12265 case M16_OPC_EXTEND
:
12266 decode_extended_mips16_opc(env
, ctx
);
12269 #if defined(TARGET_MIPS64)
12271 funct
= (ctx
->opcode
>> 8) & 0x7;
12272 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12276 generate_exception_end(ctx
, EXCP_RI
);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */
    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36,     /* MIPS64 */
    LD32 = 0x37,     /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */

    /* These opcodes are distinguished only by bits 9..6; those bits are
     * what are recorded below. */

    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */

/*
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers VolumeIV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *    Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
 */

    /* begin of microMIPS32 DSP */

    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */

    /* end of microMIPS32 DSP */

    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0,                   /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */

/* POOL32C encoding of minor opcode field (bits 15..12) */

/* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */

/* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */

/* POOL32F encoding of minor opcode field (bits 5..0) */

    /* These are the bit 7..6 values */

    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c,       /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */

/* POOL32I encoding of minor opcode field (bits 25..21) */

    /* These overlap and are distinguished by bit16 of the instruction */

/* POOL16A encoding of minor opcode field */

/* POOL16B encoding of minor opcode field */

/* POOL16C encoding of minor opcode field */

/* R6 POOL16C encoding of minor opcode field (bits 0..5) */

/* POOL16D encoding of minor opcode field */

/* POOL16E encoding of minor opcode field */

static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
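
/*
 * Worked example (for illustration): both macros extract the same bit-field,
 * but ZIMM(0x1e, 1, 4) yields 15 while SIMM(0x1e, 1, 4) shifts the field up
 * to bit 31 and back down again, so the result is sign-extended to -1.
 */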
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
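
/*
 * Worked example (for illustration) for gen_addiusp() above: the 9-bit field
 * wraps around the unsigned range, so encoded values 0 and 1 decode to 256
 * and 257, 2..255 decode to themselves, 256..509 decode to -256..-3 and
 * 510..511 decode to -258..-257; the decoded value is then scaled by 4
 * before being added to $29.
 */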
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);

        gen_helper_lwm(cpu_env, t0, t1, t2);

        gen_helper_swm(cpu_env, t0, t1, t2);

#ifdef TARGET_MIPS64

        gen_helper_ldm(cpu_env, t0, t1, t2);

        gen_helper_sdm(cpu_env, t0, t1, t2);

    tcg_temp_free_i32(t2);
12990 static void gen_pool16c_insn(DisasContext
*ctx
)
12992 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12993 int rs
= mmreg(ctx
->opcode
& 0x7);
12995 switch (((ctx
->opcode
) >> 4) & 0x3f) {
13000 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13006 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13012 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13018 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13025 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13026 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13028 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13037 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13038 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13040 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13047 int reg
= ctx
->opcode
& 0x1f;
13049 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13055 int reg
= ctx
->opcode
& 0x1f;
13056 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13057 /* Let normal delay slot handling in our caller take us
13058 to the branch target. */
13063 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13064 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13068 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13069 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13073 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13077 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13080 generate_exception_end(ctx
, EXCP_BREAK
);
13083 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13084 gen_helper_do_semihosting(cpu_env
);
13086 /* XXX: not clear which exception should be raised
13087 * when in debug mode...
13089 check_insn(ctx
, ISA_MIPS32
);
13090 generate_exception_end(ctx
, EXCP_DBp
);
13093 case JRADDIUSP
+ 0:
13094 case JRADDIUSP
+ 1:
13096 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13097 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13098 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13099 /* Let normal delay slot handling in our caller take us
13100 to the branch target. */
13104 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];

        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);

        tcg_gen_movi_tl(cpu_gpr[re], 0);
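
/*
 * Worked example (for illustration): MOVEP packs two moves into one 16-bit
 * instruction.  With enc_dest == 0, enc_rt == 1 and enc_rs == 2 the tables
 * above give rd = 5, re = 6, rs = 2 and rt = 17, i.e. $2 is copied into $5
 * and $17 into $6; a source encoding of 0 writes zero instead.
 */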
13132 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13134 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13135 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13137 switch (ctx
->opcode
& 0xf) {
13139 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13142 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13146 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13147 int offset
= extract32(ctx
->opcode
, 4, 4);
13148 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13151 case R6_JRC16
: /* JRCADDIUSP */
13152 if ((ctx
->opcode
>> 4) & 1) {
13154 int imm
= extract32(ctx
->opcode
, 5, 5);
13155 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13156 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13159 int rs
= extract32(ctx
->opcode
, 5, 5);
13160 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13163 case MOVEP
... MOVEP_07
:
13164 case MOVEP_0C
... MOVEP_0F
:
13166 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13167 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13168 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13169 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13173 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13176 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13180 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13181 int offset
= extract32(ctx
->opcode
, 4, 4);
13182 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13185 case JALRC16
: /* BREAK16, SDBBP16 */
13186 switch (ctx
->opcode
& 0x3f) {
13188 case JALRC16
+ 0x20:
13190 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13195 generate_exception(ctx
, EXCP_BREAK
);
13199 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13200 gen_helper_do_semihosting(cpu_env
);
13202 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13203 generate_exception(ctx
, EXCP_RI
);
13205 generate_exception(ctx
, EXCP_DBp
);
13212 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

            generate_exception_end(ctx, EXCP_RI);

        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd+1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

#ifdef TARGET_MIPS64

            generate_exception_end(ctx, EXCP_RI);

        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd+1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
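
/*
 * Worked example (for illustration): SYNC with stype 0x11 (SYNC_ACQUIRE)
 * becomes a TCG barrier ordering later memory operations after earlier
 * loads (TCG_MO_LD_LD | TCG_MO_LD_ST), while stype 0 falls through to the
 * default full TCG_MO_ALL barrier.
 */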
13328 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13330 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13331 int minor
= (ctx
->opcode
>> 12) & 0xf;
13332 uint32_t mips32_op
;
13334 switch (extension
) {
13336 mips32_op
= OPC_TEQ
;
13339 mips32_op
= OPC_TGE
;
13342 mips32_op
= OPC_TGEU
;
13345 mips32_op
= OPC_TLT
;
13348 mips32_op
= OPC_TLTU
;
13351 mips32_op
= OPC_TNE
;
13353 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13355 #ifndef CONFIG_USER_ONLY
13358 check_cp0_enabled(ctx
);
13360 /* Treat as NOP. */
13363 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13367 check_cp0_enabled(ctx
);
13369 TCGv t0
= tcg_temp_new();
13371 gen_load_gpr(t0
, rt
);
13372 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13378 switch (minor
& 3) {
13380 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13383 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13386 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13389 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13392 goto pool32axf_invalid
;
13396 switch (minor
& 3) {
13398 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13401 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13404 goto pool32axf_invalid
;
13410 check_insn(ctx
, ISA_MIPS32R6
);
13411 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13414 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13417 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13420 mips32_op
= OPC_CLO
;
13423 mips32_op
= OPC_CLZ
;
13425 check_insn(ctx
, ISA_MIPS32
);
13426 gen_cl(ctx
, mips32_op
, rt
, rs
);
13429 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13430 gen_rdhwr(ctx
, rt
, rs
, 0);
13433 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13437 mips32_op
= OPC_MULT
;
13440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13441 mips32_op
= OPC_MULTU
;
13444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13445 mips32_op
= OPC_DIV
;
13448 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13449 mips32_op
= OPC_DIVU
;
13452 check_insn(ctx
, ISA_MIPS32
);
13453 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13457 mips32_op
= OPC_MADD
;
13460 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13461 mips32_op
= OPC_MADDU
;
13464 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13465 mips32_op
= OPC_MSUB
;
13468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13469 mips32_op
= OPC_MSUBU
;
13471 check_insn(ctx
, ISA_MIPS32
);
13472 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13475 goto pool32axf_invalid
;
13486 generate_exception_err(ctx
, EXCP_CpU
, 2);
13489 goto pool32axf_invalid
;
13494 case JALR
: /* JALRC */
13495 case JALR_HB
: /* JALRC_HB */
13496 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13497 /* JALRC, JALRC_HB */
13498 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13500 /* JALR, JALR_HB */
13501 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13502 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13507 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13508 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13509 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13512 goto pool32axf_invalid
;
13518 check_cp0_enabled(ctx
);
13519 check_insn(ctx
, ISA_MIPS32R2
);
13520 gen_load_srsgpr(rs
, rt
);
13523 check_cp0_enabled(ctx
);
13524 check_insn(ctx
, ISA_MIPS32R2
);
13525 gen_store_srsgpr(rs
, rt
);
13528 goto pool32axf_invalid
;
13531 #ifndef CONFIG_USER_ONLY
13535 mips32_op
= OPC_TLBP
;
13538 mips32_op
= OPC_TLBR
;
13541 mips32_op
= OPC_TLBWI
;
13544 mips32_op
= OPC_TLBWR
;
13547 mips32_op
= OPC_TLBINV
;
13550 mips32_op
= OPC_TLBINVF
;
13553 mips32_op
= OPC_WAIT
;
13556 mips32_op
= OPC_DERET
;
13559 mips32_op
= OPC_ERET
;
13561 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13564 goto pool32axf_invalid
;
13570 check_cp0_enabled(ctx
);
13572 TCGv t0
= tcg_temp_new();
13574 save_cpu_state(ctx
, 1);
13575 gen_helper_di(t0
, cpu_env
);
13576 gen_store_gpr(t0
, rs
);
13577 /* Stop translation as we may have switched the execution mode */
13578 ctx
->bstate
= BS_STOP
;
13583 check_cp0_enabled(ctx
);
13585 TCGv t0
= tcg_temp_new();
13587 save_cpu_state(ctx
, 1);
13588 gen_helper_ei(t0
, cpu_env
);
13589 gen_store_gpr(t0
, rs
);
13590 /* BS_STOP isn't sufficient, we need to ensure we break out
13591 of translated code to check for pending interrupts. */
13592 gen_save_pc(ctx
->pc
+ 4);
13593 ctx
->bstate
= BS_EXCP
;
13598 goto pool32axf_invalid
;
13605 gen_sync(extract32(ctx
->opcode
, 16, 5));
13608 generate_exception_end(ctx
, EXCP_SYSCALL
);
13611 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13612 gen_helper_do_semihosting(cpu_env
);
13614 check_insn(ctx
, ISA_MIPS32
);
13615 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13616 generate_exception_end(ctx
, EXCP_RI
);
13618 generate_exception_end(ctx
, EXCP_DBp
);
13623 goto pool32axf_invalid
;
13627 switch (minor
& 3) {
13629 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13632 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13635 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13638 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13641 goto pool32axf_invalid
;
13645 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13648 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13651 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13654 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13657 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13660 goto pool32axf_invalid
;
13665 MIPS_INVAL("pool32axf");
13666 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */

static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
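    /* Each case label below packs the minor opcode together with the fmt
       (or condition code) field, so a single switch can dispatch every
       format variant of an operation, e.g. FLOAT_2BIT_FMT(ABS_FMT,
       FMT_SDPS_D) is the double-precision ABS case. */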
    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);
13722 /* Reciprocal square root */
13723 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13724 mips32_op
= OPC_RSQRT_S
;
13726 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13727 mips32_op
= OPC_RSQRT_D
;
13731 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13732 mips32_op
= OPC_SQRT_S
;
13734 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13735 mips32_op
= OPC_SQRT_D
;
13739 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13740 mips32_op
= OPC_RECIP_S
;
13742 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13743 mips32_op
= OPC_RECIP_D
;
13747 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13748 mips32_op
= OPC_FLOOR_L_S
;
13750 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13751 mips32_op
= OPC_FLOOR_L_D
;
13753 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13754 mips32_op
= OPC_FLOOR_W_S
;
13756 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13757 mips32_op
= OPC_FLOOR_W_D
;
13761 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13762 mips32_op
= OPC_CEIL_L_S
;
13764 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13765 mips32_op
= OPC_CEIL_L_D
;
13767 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13768 mips32_op
= OPC_CEIL_W_S
;
13770 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13771 mips32_op
= OPC_CEIL_W_D
;
13775 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13776 mips32_op
= OPC_TRUNC_L_S
;
13778 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13779 mips32_op
= OPC_TRUNC_L_D
;
13781 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13782 mips32_op
= OPC_TRUNC_W_S
;
13784 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13785 mips32_op
= OPC_TRUNC_W_D
;
13789 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13790 mips32_op
= OPC_ROUND_L_S
;
13792 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13793 mips32_op
= OPC_ROUND_L_D
;
13795 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13796 mips32_op
= OPC_ROUND_W_S
;
13798 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13799 mips32_op
= OPC_ROUND_W_D
;
13802 /* Integer to floating-point conversion */
13803 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13804 mips32_op
= OPC_CVT_L_S
;
13806 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13807 mips32_op
= OPC_CVT_L_D
;
13809 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13810 mips32_op
= OPC_CVT_W_S
;
13812 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13813 mips32_op
= OPC_CVT_W_D
;
13816 /* Paired-foo conversions */
13817 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13818 mips32_op
= OPC_CVT_S_PL
;
13820 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13821 mips32_op
= OPC_CVT_S_PU
;
13823 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13824 mips32_op
= OPC_CVT_PW_PS
;
13826 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13827 mips32_op
= OPC_CVT_PS_PW
;
13830 /* Floating-point moves */
13831 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13832 mips32_op
= OPC_MOV_S
;
13834 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13835 mips32_op
= OPC_MOV_D
;
13837 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13838 mips32_op
= OPC_MOV_PS
;
13841 /* Absolute value */
13842 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13843 mips32_op
= OPC_ABS_S
;
13845 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13846 mips32_op
= OPC_ABS_D
;
13848 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13849 mips32_op
= OPC_ABS_PS
;
13853 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13854 mips32_op
= OPC_NEG_S
;
13856 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13857 mips32_op
= OPC_NEG_D
;
13859 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13860 mips32_op
= OPC_NEG_PS
;
13863 /* Reciprocal square root step */
13864 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13865 mips32_op
= OPC_RSQRT1_S
;
13867 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13868 mips32_op
= OPC_RSQRT1_D
;
13870 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13871 mips32_op
= OPC_RSQRT1_PS
;
    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
13885 /* Conversions from double */
13886 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13887 mips32_op
= OPC_CVT_D_S
;
13889 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13890 mips32_op
= OPC_CVT_D_W
;
13892 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13893 mips32_op
= OPC_CVT_D_L
;
    /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, minor2, mips32_op;
    uint32_t cond, fmt, cc;
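    /* ctx->opcode already holds the first 16-bit halfword; fetch the second
       halfword at pc + 2 and concatenate to form the full 32-bit microMIPS
       instruction. */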
    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;
    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
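    /* The 6-bit major opcode selects one of the POOL32A/B/C/F/I sub-tables
       or one of the immediate and branch format handlers decoded below. */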
13960 minor
= ctx
->opcode
& 0x3f;
13963 minor
= (ctx
->opcode
>> 6) & 0xf;
13966 mips32_op
= OPC_SLL
;
13969 mips32_op
= OPC_SRA
;
13972 mips32_op
= OPC_SRL
;
13975 mips32_op
= OPC_ROTR
;
13977 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13980 check_insn(ctx
, ISA_MIPS32R6
);
13981 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13984 check_insn(ctx
, ISA_MIPS32R6
);
13985 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13988 check_insn(ctx
, ISA_MIPS32R6
);
13989 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13992 goto pool32a_invalid
;
13996 minor
= (ctx
->opcode
>> 6) & 0xf;
14000 mips32_op
= OPC_ADD
;
14003 mips32_op
= OPC_ADDU
;
14006 mips32_op
= OPC_SUB
;
14009 mips32_op
= OPC_SUBU
;
14012 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14013 mips32_op
= OPC_MUL
;
14015 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14019 mips32_op
= OPC_SLLV
;
14022 mips32_op
= OPC_SRLV
;
14025 mips32_op
= OPC_SRAV
;
14028 mips32_op
= OPC_ROTRV
;
14030 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14032 /* Logical operations */
14034 mips32_op
= OPC_AND
;
14037 mips32_op
= OPC_OR
;
14040 mips32_op
= OPC_NOR
;
14043 mips32_op
= OPC_XOR
;
14045 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14047 /* Set less than */
14049 mips32_op
= OPC_SLT
;
14052 mips32_op
= OPC_SLTU
;
14054 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14057 goto pool32a_invalid
;
14061 minor
= (ctx
->opcode
>> 6) & 0xf;
14063 /* Conditional moves */
14064 case MOVN
: /* MUL */
14065 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14067 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14070 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14073 case MOVZ
: /* MUH */
14074 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14076 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14079 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14083 check_insn(ctx
, ISA_MIPS32R6
);
14084 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14087 check_insn(ctx
, ISA_MIPS32R6
);
14088 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14090 case LWXS
: /* DIV */
14091 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14093 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14096 gen_ldxs(ctx
, rs
, rt
, rd
);
14100 check_insn(ctx
, ISA_MIPS32R6
);
14101 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14104 check_insn(ctx
, ISA_MIPS32R6
);
14105 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14108 check_insn(ctx
, ISA_MIPS32R6
);
14109 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14112 goto pool32a_invalid
;
14116 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14119 check_insn(ctx
, ISA_MIPS32R6
);
14120 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14121 extract32(ctx
->opcode
, 9, 2));
14124 check_insn(ctx
, ISA_MIPS32R6
);
14125 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14126 extract32(ctx
->opcode
, 9, 2));
14129 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14132 gen_pool32axf(env
, ctx
, rt
, rs
);
14135 generate_exception_end(ctx
, EXCP_BREAK
);
14138 check_insn(ctx
, ISA_MIPS32R6
);
14139 generate_exception_end(ctx
, EXCP_RI
);
14143 MIPS_INVAL("pool32a");
14144 generate_exception_end(ctx
, EXCP_RI
);
14149 minor
= (ctx
->opcode
>> 12) & 0xf;
14152 check_cp0_enabled(ctx
);
14153 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14154 gen_cache_operation(ctx
, rt
, rs
, imm
);
14159 /* COP2: Not implemented. */
14160 generate_exception_err(ctx
, EXCP_CpU
, 2);
14162 #ifdef TARGET_MIPS64
14165 check_insn(ctx
, ISA_MIPS3
);
14166 check_mips_64(ctx
);
14171 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14173 #ifdef TARGET_MIPS64
14176 check_insn(ctx
, ISA_MIPS3
);
14177 check_mips_64(ctx
);
14182 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14185 MIPS_INVAL("pool32b");
14186 generate_exception_end(ctx
, EXCP_RI
);
14191 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14192 minor
= ctx
->opcode
& 0x3f;
14193 check_cp1_enabled(ctx
);
14196 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14197 mips32_op
= OPC_ALNV_PS
;
14200 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14201 mips32_op
= OPC_MADD_S
;
14204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14205 mips32_op
= OPC_MADD_D
;
14208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14209 mips32_op
= OPC_MADD_PS
;
14212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14213 mips32_op
= OPC_MSUB_S
;
14216 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14217 mips32_op
= OPC_MSUB_D
;
14220 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14221 mips32_op
= OPC_MSUB_PS
;
14224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14225 mips32_op
= OPC_NMADD_S
;
14228 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14229 mips32_op
= OPC_NMADD_D
;
14232 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14233 mips32_op
= OPC_NMADD_PS
;
14236 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14237 mips32_op
= OPC_NMSUB_S
;
14240 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14241 mips32_op
= OPC_NMSUB_D
;
14244 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14245 mips32_op
= OPC_NMSUB_PS
;
14247 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14249 case CABS_COND_FMT
:
14250 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14251 cond
= (ctx
->opcode
>> 6) & 0xf;
14252 cc
= (ctx
->opcode
>> 13) & 0x7;
14253 fmt
= (ctx
->opcode
>> 10) & 0x3;
14256 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14259 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14262 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14265 goto pool32f_invalid
;
14269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14270 cond
= (ctx
->opcode
>> 6) & 0xf;
14271 cc
= (ctx
->opcode
>> 13) & 0x7;
14272 fmt
= (ctx
->opcode
>> 10) & 0x3;
14275 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14278 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14281 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14284 goto pool32f_invalid
;
14288 check_insn(ctx
, ISA_MIPS32R6
);
14289 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14292 check_insn(ctx
, ISA_MIPS32R6
);
14293 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14296 gen_pool32fxf(ctx
, rt
, rs
);
14300 switch ((ctx
->opcode
>> 6) & 0x7) {
14302 mips32_op
= OPC_PLL_PS
;
14305 mips32_op
= OPC_PLU_PS
;
14308 mips32_op
= OPC_PUL_PS
;
14311 mips32_op
= OPC_PUU_PS
;
14314 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14315 mips32_op
= OPC_CVT_PS_S
;
14317 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14320 goto pool32f_invalid
;
14324 check_insn(ctx
, ISA_MIPS32R6
);
14325 switch ((ctx
->opcode
>> 9) & 0x3) {
14327 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14330 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14333 goto pool32f_invalid
;
14338 switch ((ctx
->opcode
>> 6) & 0x7) {
14340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14341 mips32_op
= OPC_LWXC1
;
14344 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14345 mips32_op
= OPC_SWXC1
;
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 mips32_op
= OPC_LDXC1
;
14352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14353 mips32_op
= OPC_SDXC1
;
14356 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14357 mips32_op
= OPC_LUXC1
;
14360 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14361 mips32_op
= OPC_SUXC1
;
14363 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14366 goto pool32f_invalid
;
14370 check_insn(ctx
, ISA_MIPS32R6
);
14371 switch ((ctx
->opcode
>> 9) & 0x3) {
14373 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14376 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14379 goto pool32f_invalid
;
14384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14385 fmt
= (ctx
->opcode
>> 9) & 0x3;
14386 switch ((ctx
->opcode
>> 6) & 0x7) {
14390 mips32_op
= OPC_RSQRT2_S
;
14393 mips32_op
= OPC_RSQRT2_D
;
14396 mips32_op
= OPC_RSQRT2_PS
;
14399 goto pool32f_invalid
;
14405 mips32_op
= OPC_RECIP2_S
;
14408 mips32_op
= OPC_RECIP2_D
;
14411 mips32_op
= OPC_RECIP2_PS
;
14414 goto pool32f_invalid
;
14418 mips32_op
= OPC_ADDR_PS
;
14421 mips32_op
= OPC_MULR_PS
;
14423 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14426 goto pool32f_invalid
;
14430 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14431 cc
= (ctx
->opcode
>> 13) & 0x7;
14432 fmt
= (ctx
->opcode
>> 9) & 0x3;
14433 switch ((ctx
->opcode
>> 6) & 0x7) {
14434 case MOVF_FMT
: /* RINT_FMT */
14435 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14439 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14442 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14445 goto pool32f_invalid
;
14451 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14454 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14458 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14461 goto pool32f_invalid
;
14465 case MOVT_FMT
: /* CLASS_FMT */
14466 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14470 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14473 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14476 goto pool32f_invalid
;
14482 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14485 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14489 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14492 goto pool32f_invalid
;
14497 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14500 goto pool32f_invalid
;
14503 #define FINSN_3ARG_SDPS(prfx) \
14504 switch ((ctx->opcode >> 8) & 0x3) { \
14506 mips32_op = OPC_##prfx##_S; \
14509 mips32_op = OPC_##prfx##_D; \
14511 case FMT_SDPS_PS: \
14513 mips32_op = OPC_##prfx##_PS; \
14516 goto pool32f_invalid; \
14519 check_insn(ctx
, ISA_MIPS32R6
);
14520 switch ((ctx
->opcode
>> 9) & 0x3) {
14522 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14525 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14528 goto pool32f_invalid
;
14532 check_insn(ctx
, ISA_MIPS32R6
);
14533 switch ((ctx
->opcode
>> 9) & 0x3) {
14535 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14538 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14541 goto pool32f_invalid
;
14545 /* regular FP ops */
14546 switch ((ctx
->opcode
>> 6) & 0x3) {
14548 FINSN_3ARG_SDPS(ADD
);
14551 FINSN_3ARG_SDPS(SUB
);
14554 FINSN_3ARG_SDPS(MUL
);
14557 fmt
= (ctx
->opcode
>> 8) & 0x3;
14559 mips32_op
= OPC_DIV_D
;
14560 } else if (fmt
== 0) {
14561 mips32_op
= OPC_DIV_S
;
14563 goto pool32f_invalid
;
14567 goto pool32f_invalid
;
14572 switch ((ctx
->opcode
>> 6) & 0x7) {
14573 case MOVN_FMT
: /* SELNEZ_FMT */
14574 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14576 switch ((ctx
->opcode
>> 9) & 0x3) {
14578 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14581 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14584 goto pool32f_invalid
;
14588 FINSN_3ARG_SDPS(MOVN
);
14592 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14593 FINSN_3ARG_SDPS(MOVN
);
14595 case MOVZ_FMT
: /* SELEQZ_FMT */
14596 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14598 switch ((ctx
->opcode
>> 9) & 0x3) {
14600 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14603 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14606 goto pool32f_invalid
;
14610 FINSN_3ARG_SDPS(MOVZ
);
14614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14615 FINSN_3ARG_SDPS(MOVZ
);
14618 check_insn(ctx
, ISA_MIPS32R6
);
14619 switch ((ctx
->opcode
>> 9) & 0x3) {
14621 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14624 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14627 goto pool32f_invalid
;
14631 check_insn(ctx
, ISA_MIPS32R6
);
14632 switch ((ctx
->opcode
>> 9) & 0x3) {
14634 mips32_op
= OPC_MADDF_S
;
14637 mips32_op
= OPC_MADDF_D
;
14640 goto pool32f_invalid
;
14644 check_insn(ctx
, ISA_MIPS32R6
);
14645 switch ((ctx
->opcode
>> 9) & 0x3) {
14647 mips32_op
= OPC_MSUBF_S
;
14650 mips32_op
= OPC_MSUBF_D
;
14653 goto pool32f_invalid
;
14657 goto pool32f_invalid
;
14661 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14665 MIPS_INVAL("pool32f");
14666 generate_exception_end(ctx
, EXCP_RI
);
14670 generate_exception_err(ctx
, EXCP_CpU
, 1);
14674 minor
= (ctx
->opcode
>> 21) & 0x1f;
14677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14678 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14681 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14682 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14683 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14686 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14687 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14688 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14691 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14692 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14695 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14696 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14697 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14700 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14701 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14702 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14705 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14706 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14709 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14710 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14714 case TLTI
: /* BC1EQZC */
14715 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14717 check_cp1_enabled(ctx
);
14718 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14721 mips32_op
= OPC_TLTI
;
14725 case TGEI
: /* BC1NEZC */
14726 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14728 check_cp1_enabled(ctx
);
14729 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14732 mips32_op
= OPC_TGEI
;
14737 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14738 mips32_op
= OPC_TLTIU
;
14741 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14742 mips32_op
= OPC_TGEIU
;
14744 case TNEI
: /* SYNCI */
14745 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14747 /* Break the TB to be able to sync copied instructions
14749 ctx
->bstate
= BS_STOP
;
14752 mips32_op
= OPC_TNEI
;
14757 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14758 mips32_op
= OPC_TEQI
;
14760 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14765 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14766 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14767 4, rs
, 0, imm
<< 1, 0);
14768 /* Compact branches don't have a delay slot, so just let
14769 the normal delay slot handling take us to the branch
14773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14774 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14777 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14778 /* Break the TB to be able to sync copied instructions
14780 ctx
->bstate
= BS_STOP
;
14784 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14785 /* COP2: Not implemented. */
14786 generate_exception_err(ctx
, EXCP_CpU
, 2);
14789 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14790 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14793 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14794 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14797 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14798 mips32_op
= OPC_BC1FANY4
;
14801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14802 mips32_op
= OPC_BC1TANY4
;
14805 check_insn(ctx
, ASE_MIPS3D
);
14808 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14809 check_cp1_enabled(ctx
);
14810 gen_compute_branch1(ctx
, mips32_op
,
14811 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14813 generate_exception_err(ctx
, EXCP_CpU
, 1);
14818 /* MIPS DSP: not implemented */
14821 MIPS_INVAL("pool32i");
14822 generate_exception_end(ctx
, EXCP_RI
);
        minor = (ctx->opcode >> 12) & 0xf;
        offset = sextract32(ctx->opcode, 0,
                            (ctx->insn_flags & ISA_MIPS32R6) ? 9 : 12);
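        /* Release 6 shrinks the POOL32C offset to 9 signed bits; bits 9..11
           are reused as a sub-opcode for the EVA load/store forms decoded
           below.  Pre-R6 encodings keep the full 12-bit signed offset. */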
14832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14833 mips32_op
= OPC_LWL
;
14836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14837 mips32_op
= OPC_SWL
;
14840 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14841 mips32_op
= OPC_LWR
;
14844 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14845 mips32_op
= OPC_SWR
;
14847 #if defined(TARGET_MIPS64)
14849 check_insn(ctx
, ISA_MIPS3
);
14850 check_mips_64(ctx
);
14851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14852 mips32_op
= OPC_LDL
;
14855 check_insn(ctx
, ISA_MIPS3
);
14856 check_mips_64(ctx
);
14857 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14858 mips32_op
= OPC_SDL
;
14861 check_insn(ctx
, ISA_MIPS3
);
14862 check_mips_64(ctx
);
14863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14864 mips32_op
= OPC_LDR
;
14867 check_insn(ctx
, ISA_MIPS3
);
14868 check_mips_64(ctx
);
14869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14870 mips32_op
= OPC_SDR
;
14873 check_insn(ctx
, ISA_MIPS3
);
14874 check_mips_64(ctx
);
14875 mips32_op
= OPC_LWU
;
14878 check_insn(ctx
, ISA_MIPS3
);
14879 check_mips_64(ctx
);
14880 mips32_op
= OPC_LLD
;
14884 mips32_op
= OPC_LL
;
14887 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14890 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
14893 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14895 #if defined(TARGET_MIPS64)
14897 check_insn(ctx
, ISA_MIPS3
);
14898 check_mips_64(ctx
);
14899 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14904 MIPS_INVAL("pool32c ld-eva");
14905 generate_exception_end(ctx
, EXCP_RI
);
14908 check_cp0_enabled(ctx
);
14910 minor2
= (ctx
->opcode
>> 9) & 0x7;
14911 offset
= sextract32(ctx
->opcode
, 0, 9);
14914 mips32_op
= OPC_LBUE
;
14917 mips32_op
= OPC_LHUE
;
14920 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14921 mips32_op
= OPC_LWLE
;
14924 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14925 mips32_op
= OPC_LWRE
;
14928 mips32_op
= OPC_LBE
;
14931 mips32_op
= OPC_LHE
;
14934 mips32_op
= OPC_LLE
;
14937 mips32_op
= OPC_LWE
;
14943 MIPS_INVAL("pool32c st-eva");
14944 generate_exception_end(ctx
, EXCP_RI
);
14947 check_cp0_enabled(ctx
);
14949 minor2
= (ctx
->opcode
>> 9) & 0x7;
14950 offset
= sextract32(ctx
->opcode
, 0, 9);
14953 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14954 mips32_op
= OPC_SWLE
;
14957 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14958 mips32_op
= OPC_SWRE
;
14961 /* Treat as no-op */
14962 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14963 /* hint codes 24-31 are reserved and signal RI */
14964 generate_exception(ctx
, EXCP_RI
);
14968 /* Treat as no-op */
14969 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14970 gen_cache_operation(ctx
, rt
, rs
, offset
);
14974 mips32_op
= OPC_SBE
;
14977 mips32_op
= OPC_SHE
;
14980 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
14983 mips32_op
= OPC_SWE
;
14988 /* Treat as no-op */
14989 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14990 /* hint codes 24-31 are reserved and signal RI */
14991 generate_exception(ctx
, EXCP_RI
);
14995 MIPS_INVAL("pool32c");
14996 generate_exception_end(ctx
, EXCP_RI
);
15000 case ADDI32
: /* AUI, LUI */
15001 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15003 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15006 mips32_op
= OPC_ADDI
;
15011 mips32_op
= OPC_ADDIU
;
15013 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15016 /* Logical operations */
15018 mips32_op
= OPC_ORI
;
15021 mips32_op
= OPC_XORI
;
15024 mips32_op
= OPC_ANDI
;
15026 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15029 /* Set less than immediate */
15031 mips32_op
= OPC_SLTI
;
15034 mips32_op
= OPC_SLTIU
;
15036 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15039 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15040 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15041 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15042 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15044 case JALS32
: /* BOVC, BEQC, BEQZALC */
15045 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15048 mips32_op
= OPC_BOVC
;
15049 } else if (rs
< rt
&& rs
== 0) {
15051 mips32_op
= OPC_BEQZALC
;
15054 mips32_op
= OPC_BEQC
;
15056 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15059 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15060 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15061 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15064 case BEQ32
: /* BC */
15065 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15067 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15068 sextract32(ctx
->opcode
<< 1, 0, 27));
15071 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15074 case BNE32
: /* BALC */
15075 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15077 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15078 sextract32(ctx
->opcode
<< 1, 0, 27));
15081 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15084 case J32
: /* BGTZC, BLTZC, BLTC */
15085 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15086 if (rs
== 0 && rt
!= 0) {
15088 mips32_op
= OPC_BGTZC
;
15089 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15091 mips32_op
= OPC_BLTZC
;
15094 mips32_op
= OPC_BLTC
;
15096 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15099 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15100 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15103 case JAL32
: /* BLEZC, BGEZC, BGEC */
15104 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15105 if (rs
== 0 && rt
!= 0) {
15107 mips32_op
= OPC_BLEZC
;
15108 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15110 mips32_op
= OPC_BGEZC
;
15113 mips32_op
= OPC_BGEC
;
15115 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15118 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15119 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15120 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15123 /* Floating point (COP1) */
15125 mips32_op
= OPC_LWC1
;
15128 mips32_op
= OPC_LDC1
;
15131 mips32_op
= OPC_SWC1
;
15134 mips32_op
= OPC_SDC1
;
15136 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15138 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15139 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15140 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15141 switch ((ctx
->opcode
>> 16) & 0x1f) {
15142 case ADDIUPC_00
... ADDIUPC_07
:
15143 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
15146 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
15149 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
15151 case LWPC_08
... LWPC_0F
:
15152 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
15155 generate_exception(ctx
, EXCP_RI
);
15160 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15161 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15163 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15166 case BNVC
: /* BNEC, BNEZALC */
15167 check_insn(ctx
, ISA_MIPS32R6
);
15170 mips32_op
= OPC_BNVC
;
15171 } else if (rs
< rt
&& rs
== 0) {
15173 mips32_op
= OPC_BNEZALC
;
15176 mips32_op
= OPC_BNEC
;
15178 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15180 case R6_BNEZC
: /* JIALC */
15181 check_insn(ctx
, ISA_MIPS32R6
);
15184 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15185 sextract32(ctx
->opcode
<< 1, 0, 22));
15188 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15191 case R6_BEQZC
: /* JIC */
15192 check_insn(ctx
, ISA_MIPS32R6
);
15195 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15196 sextract32(ctx
->opcode
<< 1, 0, 22));
15199 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15202 case BLEZALC
: /* BGEZALC, BGEUC */
15203 check_insn(ctx
, ISA_MIPS32R6
);
15204 if (rs
== 0 && rt
!= 0) {
15206 mips32_op
= OPC_BLEZALC
;
15207 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15209 mips32_op
= OPC_BGEZALC
;
15212 mips32_op
= OPC_BGEUC
;
15214 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15216 case BGTZALC
: /* BLTZALC, BLTUC */
15217 check_insn(ctx
, ISA_MIPS32R6
);
15218 if (rs
== 0 && rt
!= 0) {
15220 mips32_op
= OPC_BGTZALC
;
15221 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15223 mips32_op
= OPC_BLTZALC
;
15226 mips32_op
= OPC_BLTUC
;
15228 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15230 /* Loads and stores */
15232 mips32_op
= OPC_LB
;
15235 mips32_op
= OPC_LBU
;
15238 mips32_op
= OPC_LH
;
15241 mips32_op
= OPC_LHU
;
15244 mips32_op
= OPC_LW
;
15246 #ifdef TARGET_MIPS64
15248 check_insn(ctx
, ISA_MIPS3
);
15249 check_mips_64(ctx
);
15250 mips32_op
= OPC_LD
;
15253 check_insn(ctx
, ISA_MIPS3
);
15254 check_mips_64(ctx
);
15255 mips32_op
= OPC_SD
;
15259 mips32_op
= OPC_SB
;
15262 mips32_op
= OPC_SH
;
15265 mips32_op
= OPC_SW
;
15268 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15271 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15274 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
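        /* The branch that created this delay slot recorded the required slot
           size in hflags (MIPS_HFLAG_BDS16 or MIPS_HFLAG_BDS32); an
           instruction of the other size in the slot raises Reserved
           Instruction. */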
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
15326 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15327 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15328 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15331 switch (ctx
->opcode
& 0x1) {
15339 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15340 /* In the Release 6 the register number location in
15341 * the instruction encoding has changed.
15343 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15345 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs = mmreg(uMIPS_RS(ctx->opcode));
            int amount = (ctx->opcode >> 1) & 0x7;

            amount = amount == 0 ? 8 : amount;
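            /* The 3-bit shift field cannot encode 8 directly, so an encoded
               value of 0 is interpreted as a shift amount of 8. */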
15357 switch (ctx
->opcode
& 0x1) {
15366 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15370 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15371 gen_pool16c_r6_insn(ctx
);
15373 gen_pool16c_insn(ctx
);
15378 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15379 int rb
= 28; /* GP */
15380 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15382 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15386 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15387 if (ctx
->opcode
& 1) {
15388 generate_exception_end(ctx
, EXCP_RI
);
15391 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15392 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15393 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15394 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rb = mmreg(uMIPS_RS(ctx->opcode));
            int16_t offset = ZIMM(ctx->opcode, 0, 4);
            offset = (offset == 0xf ? -1 : offset);

            gen_ld(ctx, OPC_LBU, rd, rb, offset);
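            /* LBU16 has a 4-bit unsigned offset, except that the all-ones
               pattern (0xf) encodes an offset of -1. */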
15409 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15410 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15411 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15413 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15418 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15419 int rb
= 29; /* SP */
15420 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15422 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15427 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15428 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15429 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15431 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15436 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15437 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15438 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15440 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15445 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15446 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15447 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15449 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15454 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15455 int rb
= 29; /* SP */
15456 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15458 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15463 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15464 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15465 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15467 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15472 int rd
= uMIPS_RD5(ctx
->opcode
);
15473 int rs
= uMIPS_RS5(ctx
->opcode
);
15475 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15482 switch (ctx
->opcode
& 0x1) {
15492 switch (ctx
->opcode
& 0x1) {
15497 gen_addiur1sp(ctx
);
15501 case B16
: /* BC16 */
15502 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15503 sextract32(ctx
->opcode
, 0, 10) << 1,
15504 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15506 case BNEZ16
: /* BNEZC16 */
15507 case BEQZ16
: /* BEQZC16 */
15508 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15509 mmreg(uMIPS_RD(ctx
->opcode
)),
15510 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15511 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
            int reg = mmreg(uMIPS_RD(ctx->opcode));
            int imm = ZIMM(ctx->opcode, 0, 7);

            imm = (imm == 0x7f ? -1 : imm);
            tcg_gen_movi_tl(cpu_gpr[reg], imm);
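            /* LI16 loads an unsigned 7-bit constant, with the single value
               0x7f repurposed to mean -1. */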
15526 generate_exception_end(ctx
, EXCP_RI
);
15529 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

#endif

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }
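    /* base and offset are both GPR indexes (register-indexed addressing):
       if either is $zero the other register's value is used directly,
       otherwise the effective address is the sum of the two registers. */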
15563 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15564 gen_store_gpr(t0
, rd
);
15567 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15568 gen_store_gpr(t0
, rd
);
15571 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15572 gen_store_gpr(t0
, rd
);
15574 #if defined(TARGET_MIPS64)
15576 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15577 gen_store_gpr(t0
, rd
);
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15602 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15603 case OPC_MULT_G_2E
:
15607 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15609 case OPC_ADDUH_R_QB
:
15610 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15613 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15615 case OPC_ADDQH_R_PH
:
15616 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15619 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15621 case OPC_ADDQH_R_W
:
15622 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15625 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15627 case OPC_SUBUH_R_QB
:
15628 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15631 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15633 case OPC_SUBQH_R_PH
:
15634 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15637 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15639 case OPC_SUBQH_R_W
:
15640 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15644 case OPC_ABSQ_S_PH_DSP
:
15646 case OPC_ABSQ_S_QB
:
15648 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15650 case OPC_ABSQ_S_PH
:
15652 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15656 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15658 case OPC_PRECEQ_W_PHL
:
15660 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15661 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15663 case OPC_PRECEQ_W_PHR
:
15665 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15666 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15667 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15669 case OPC_PRECEQU_PH_QBL
:
15671 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15673 case OPC_PRECEQU_PH_QBR
:
15675 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15677 case OPC_PRECEQU_PH_QBLA
:
15679 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15681 case OPC_PRECEQU_PH_QBRA
:
15683 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15685 case OPC_PRECEU_PH_QBL
:
15687 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15689 case OPC_PRECEU_PH_QBR
:
15691 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15693 case OPC_PRECEU_PH_QBLA
:
15695 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15697 case OPC_PRECEU_PH_QBRA
:
15699 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15703 case OPC_ADDU_QB_DSP
:
15707 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15709 case OPC_ADDQ_S_PH
:
15711 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15715 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15719 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15721 case OPC_ADDU_S_QB
:
15723 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15727 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15729 case OPC_ADDU_S_PH
:
15731 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15735 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15737 case OPC_SUBQ_S_PH
:
15739 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15743 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15747 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15749 case OPC_SUBU_S_QB
:
15751 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15755 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15757 case OPC_SUBU_S_PH
:
15759 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15763 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15767 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15771 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15773 case OPC_RADDU_W_QB
:
15775 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15779 case OPC_CMPU_EQ_QB_DSP
:
15781 case OPC_PRECR_QB_PH
:
15783 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15785 case OPC_PRECRQ_QB_PH
:
15787 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15789 case OPC_PRECR_SRA_PH_W
:
15792 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15793 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15795 tcg_temp_free_i32(sa_t
);
15798 case OPC_PRECR_SRA_R_PH_W
:
15801 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15802 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15804 tcg_temp_free_i32(sa_t
);
15807 case OPC_PRECRQ_PH_W
:
15809 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15811 case OPC_PRECRQ_RS_PH_W
:
15813 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15815 case OPC_PRECRQU_S_QB_PH
:
15817 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15821 #ifdef TARGET_MIPS64
15822 case OPC_ABSQ_S_QH_DSP
:
15824 case OPC_PRECEQ_L_PWL
:
15826 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15828 case OPC_PRECEQ_L_PWR
:
15830 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15832 case OPC_PRECEQ_PW_QHL
:
15834 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15836 case OPC_PRECEQ_PW_QHR
:
15838 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15840 case OPC_PRECEQ_PW_QHLA
:
15842 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15844 case OPC_PRECEQ_PW_QHRA
:
15846 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15848 case OPC_PRECEQU_QH_OBL
:
15850 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15852 case OPC_PRECEQU_QH_OBR
:
15854 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15856 case OPC_PRECEQU_QH_OBLA
:
15858 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15860 case OPC_PRECEQU_QH_OBRA
:
15862 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15864 case OPC_PRECEU_QH_OBL
:
15866 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15868 case OPC_PRECEU_QH_OBR
:
15870 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15872 case OPC_PRECEU_QH_OBLA
:
15874 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15876 case OPC_PRECEU_QH_OBRA
:
15878 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15880 case OPC_ABSQ_S_OB
:
15882 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15884 case OPC_ABSQ_S_PW
:
15886 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15888 case OPC_ABSQ_S_QH
:
15890 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15894 case OPC_ADDU_OB_DSP
:
15896 case OPC_RADDU_L_OB
:
15898 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15902 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15904 case OPC_SUBQ_S_PW
:
15906 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15910 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15912 case OPC_SUBQ_S_QH
:
15914 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15918 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15920 case OPC_SUBU_S_OB
:
15922 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15926 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15928 case OPC_SUBU_S_QH
:
15930 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15934 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15936 case OPC_SUBUH_R_OB
:
15938 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15942 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15944 case OPC_ADDQ_S_PW
:
15946 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15950 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15952 case OPC_ADDQ_S_QH
:
15954 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15958 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15960 case OPC_ADDU_S_OB
:
15962 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15966 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15968 case OPC_ADDU_S_QH
:
15970 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15974 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15976 case OPC_ADDUH_R_OB
:
15978 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15982 case OPC_CMPU_EQ_OB_DSP
:
15984 case OPC_PRECR_OB_QH
:
15986 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15988 case OPC_PRECR_SRA_QH_PW
:
15991 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15992 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15993 tcg_temp_free_i32(ret_t
);
15996 case OPC_PRECR_SRA_R_QH_PW
:
15999 TCGv_i32 sa_v
= tcg_const_i32(ret
);
16000 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
16001 tcg_temp_free_i32(sa_v
);
16004 case OPC_PRECRQ_OB_QH
:
16006 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16008 case OPC_PRECRQ_PW_L
:
16010 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
16012 case OPC_PRECRQ_QH_PW
:
16014 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16016 case OPC_PRECRQ_RS_QH_PW
:
16018 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16020 case OPC_PRECRQU_S_OB_QH
:
16022 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16029 tcg_temp_free(v1_t
);
16030 tcg_temp_free(v2_t
);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
    uint32_t op2;
    TCGv t0;
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
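    /* t0 holds the immediate shift amount (the raw v1 field) used by the
       immediate SHLL/SHRA/SHRL forms below, while v1_t holds the value of
       register v1 used by the variable (SHLLV/SHRAV/SHRLV) forms. */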
16055 case OPC_SHLL_QB_DSP
:
16057 op2
= MASK_SHLL_QB(ctx
->opcode
);
16061 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16065 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16069 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16073 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16075 case OPC_SHLL_S_PH
:
16077 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16079 case OPC_SHLLV_S_PH
:
16081 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16085 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16087 case OPC_SHLLV_S_W
:
16089 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16093 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16097 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16101 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16105 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16109 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16111 case OPC_SHRA_R_QB
:
16113 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16117 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16119 case OPC_SHRAV_R_QB
:
16121 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16125 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16127 case OPC_SHRA_R_PH
:
16129 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16133 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16135 case OPC_SHRAV_R_PH
:
16137 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16141 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16143 case OPC_SHRAV_R_W
:
16145 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16147 default: /* Invalid */
16148 MIPS_INVAL("MASK SHLL.QB");
16149 generate_exception_end(ctx
, EXCP_RI
);
16154 #ifdef TARGET_MIPS64
16155 case OPC_SHLL_OB_DSP
:
16156 op2
= MASK_SHLL_OB(ctx
->opcode
);
16160 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16164 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16166 case OPC_SHLL_S_PW
:
16168 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16170 case OPC_SHLLV_S_PW
:
16172 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16176 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16180 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16184 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16188 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16190 case OPC_SHLL_S_QH
:
16192 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16194 case OPC_SHLLV_S_QH
:
16196 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16200 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16204 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16206 case OPC_SHRA_R_OB
:
16208 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16210 case OPC_SHRAV_R_OB
:
16212 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16216 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16220 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16222 case OPC_SHRA_R_PW
:
16224 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16226 case OPC_SHRAV_R_PW
:
16228 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16232 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16236 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16238 case OPC_SHRA_R_QH
:
16240 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16242 case OPC_SHRAV_R_QH
:
16244 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16248 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16252 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16256 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16260 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16262 default: /* Invalid */
16263 MIPS_INVAL("MASK SHLL.OB");
16264 generate_exception_end(ctx
, EXCP_RI
);
16272 tcg_temp_free(v1_t
);
16273 tcg_temp_free(v2_t
);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    TCGv_i32 t0;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
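    /* For the accumulator-based DPA/DPS/MAQ forms below, t0 carries the
       accumulator number taken from the ret field and the helpers update
       that accumulator's HI/LO pair; the MUL/MULQ/MULEU-class forms instead
       write their result straight to cpu_gpr[ret]. */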
    switch (op1) {
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
16303 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16306 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16309 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16311 case OPC_MULQ_RS_W
:
16312 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16316 case OPC_DPA_W_PH_DSP
:
16318 case OPC_DPAU_H_QBL
:
16320 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16322 case OPC_DPAU_H_QBR
:
16324 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16326 case OPC_DPSU_H_QBL
:
16328 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16330 case OPC_DPSU_H_QBR
:
16332 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16336 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16338 case OPC_DPAX_W_PH
:
16340 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16342 case OPC_DPAQ_S_W_PH
:
16344 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16346 case OPC_DPAQX_S_W_PH
:
16348 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16350 case OPC_DPAQX_SA_W_PH
:
16352 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16356 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16358 case OPC_DPSX_W_PH
:
16360 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16362 case OPC_DPSQ_S_W_PH
:
16364 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16366 case OPC_DPSQX_S_W_PH
:
16368 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16370 case OPC_DPSQX_SA_W_PH
:
16372 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16374 case OPC_MULSAQ_S_W_PH
:
16376 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16378 case OPC_DPAQ_SA_L_W
:
16380 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16382 case OPC_DPSQ_SA_L_W
:
16384 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16386 case OPC_MAQ_S_W_PHL
:
16388 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16390 case OPC_MAQ_S_W_PHR
:
16392 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16394 case OPC_MAQ_SA_W_PHL
:
16396 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16398 case OPC_MAQ_SA_W_PHR
:
16400 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16402 case OPC_MULSA_W_PH
:
16404 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16408 #ifdef TARGET_MIPS64
16409 case OPC_DPAQ_W_QH_DSP
:
16411 int ac
= ret
& 0x03;
16412 tcg_gen_movi_i32(t0
, ac
);
16417 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16421 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16425 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16429 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16433 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16435 case OPC_DPAQ_S_W_QH
:
16437 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16439 case OPC_DPAQ_SA_L_PW
:
16441 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16443 case OPC_DPAU_H_OBL
:
16445 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16447 case OPC_DPAU_H_OBR
:
16449 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16453 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16455 case OPC_DPSQ_S_W_QH
:
16457 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16459 case OPC_DPSQ_SA_L_PW
:
16461 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16463 case OPC_DPSU_H_OBL
:
16465 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16467 case OPC_DPSU_H_OBR
:
16469 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16471 case OPC_MAQ_S_L_PWL
:
16473 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16475 case OPC_MAQ_S_L_PWR
:
16477 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16479 case OPC_MAQ_S_W_QHLL
:
16481 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16483 case OPC_MAQ_SA_W_QHLL
:
16485 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16487 case OPC_MAQ_S_W_QHLR
:
16489 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16491 case OPC_MAQ_SA_W_QHLR
:
16493 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16495 case OPC_MAQ_S_W_QHRL
:
16497 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16499 case OPC_MAQ_SA_W_QHRL
:
16501 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16503 case OPC_MAQ_S_W_QHRR
:
16505 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16507 case OPC_MAQ_SA_W_QHRR
:
16509 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16511 case OPC_MULSAQ_S_L_PW
:
16513 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16515 case OPC_MULSAQ_S_W_QH
:
16517 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16523 case OPC_ADDU_QB_DSP
:
16525 case OPC_MULEU_S_PH_QBL
:
16527 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16529 case OPC_MULEU_S_PH_QBR
:
16531 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16533 case OPC_MULQ_RS_PH
:
16535 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16537 case OPC_MULEQ_S_W_PHL
:
16539 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16541 case OPC_MULEQ_S_W_PHR
:
16543 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16545 case OPC_MULQ_S_PH
:
16547 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16551 #ifdef TARGET_MIPS64
16552 case OPC_ADDU_OB_DSP
:
16554 case OPC_MULEQ_S_PW_QHL
:
16556 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16558 case OPC_MULEQ_S_PW_QHR
:
16560 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16562 case OPC_MULEU_S_QH_OBL
:
16564 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16566 case OPC_MULEU_S_QH_OBR
:
16568 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16570 case OPC_MULQ_RS_QH
:
16572 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16579 tcg_temp_free_i32(t0
);
16580 tcg_temp_free(v1_t
);
16581 tcg_temp_free(v2_t
);
16584 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16592 /* Treat as NOP. */
16596 t0
= tcg_temp_new();
16597 val_t
= tcg_temp_new();
16598 gen_load_gpr(val_t
, val
);
16601 case OPC_ABSQ_S_PH_DSP
:
16605 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
            target_long result;
            imm = (ctx->opcode >> 16) & 0xFF;
            result = (uint32_t)imm << 24 |
                     (uint32_t)imm << 16 |
                     (uint32_t)imm << 8  |
                     (uint32_t)imm;
            result = (int32_t)result;
            tcg_gen_movi_tl(cpu_gpr[ret], result);
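            /* The REPL.QB result depends only on the immediate, so the byte
               is replicated into all four lanes at translation time and
               loaded with a single movi instead of calling a helper. */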
16622 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16623 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16624 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16625 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16626 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16627 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16632 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16633 imm
= (int16_t)(imm
<< 6) >> 6;
16634 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16635 (target_long
)((int32_t)imm
<< 16 | \
16641 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16642 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16643 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16644 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16648 #ifdef TARGET_MIPS64
16649 case OPC_ABSQ_S_QH_DSP
:
16656 imm
= (ctx
->opcode
>> 16) & 0xFF;
16657 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16658 temp
= (temp
<< 16) | temp
;
16659 temp
= (temp
<< 32) | temp
;
16660 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16668 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16669 imm
= (int16_t)(imm
<< 6) >> 6;
16670 temp
= ((target_long
)imm
<< 32) \
16671 | ((target_long
)imm
& 0xFFFFFFFF);
16672 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16680 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16681 imm
= (int16_t)(imm
<< 6) >> 6;
16683 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16684 ((uint64_t)(uint16_t)imm
<< 32) |
16685 ((uint64_t)(uint16_t)imm
<< 16) |
16686 (uint64_t)(uint16_t)imm
;
16687 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16692 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16693 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16694 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16695 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16696 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16697 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16698 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16702 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16703 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16704 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16708 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16709 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16710 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16711 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16712 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16719 tcg_temp_free(val_t
);
16722 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16723 uint32_t op1
, uint32_t op2
,
16724 int ret
, int v1
, int v2
, int check_ret
)
16730 if ((ret
== 0) && (check_ret
== 1)) {
16731 /* Treat as NOP. */
16735 t1
= tcg_temp_new();
16736 v1_t
= tcg_temp_new();
16737 v2_t
= tcg_temp_new();
16739 gen_load_gpr(v1_t
, v1
);
16740 gen_load_gpr(v2_t
, v2
);
16743 case OPC_CMPU_EQ_QB_DSP
:
16745 case OPC_CMPU_EQ_QB
:
16747 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16749 case OPC_CMPU_LT_QB
:
16751 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16753 case OPC_CMPU_LE_QB
:
16755 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16757 case OPC_CMPGU_EQ_QB
:
16759 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16761 case OPC_CMPGU_LT_QB
:
16763 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16765 case OPC_CMPGU_LE_QB
:
16767 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16769 case OPC_CMPGDU_EQ_QB
:
16771 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16772 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16773 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16774 tcg_gen_shli_tl(t1
, t1
, 24);
16775 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16777 case OPC_CMPGDU_LT_QB
:
16779 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16780 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16781 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16782 tcg_gen_shli_tl(t1
, t1
, 24);
16783 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16785 case OPC_CMPGDU_LE_QB
:
16787 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16788 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16789 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16790 tcg_gen_shli_tl(t1
, t1
, 24);
16791 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16793 case OPC_CMP_EQ_PH
:
16795 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16797 case OPC_CMP_LT_PH
:
16799 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16801 case OPC_CMP_LE_PH
:
16803 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16807 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16811 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16813 case OPC_PACKRL_PH
:
16815 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16819 #ifdef TARGET_MIPS64
16820 case OPC_CMPU_EQ_OB_DSP
:
16822 case OPC_CMP_EQ_PW
:
16824 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16826 case OPC_CMP_LT_PW
:
16828 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16830 case OPC_CMP_LE_PW
:
16832 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16834 case OPC_CMP_EQ_QH
:
16836 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16838 case OPC_CMP_LT_QH
:
16840 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16842 case OPC_CMP_LE_QH
:
16844 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16846 case OPC_CMPGDU_EQ_OB
:
16848 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16850 case OPC_CMPGDU_LT_OB
:
16852 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16854 case OPC_CMPGDU_LE_OB
:
16856 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16858 case OPC_CMPGU_EQ_OB
:
16860 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16862 case OPC_CMPGU_LT_OB
:
16864 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16866 case OPC_CMPGU_LE_OB
:
16868 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16870 case OPC_CMPU_EQ_OB
:
16872 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16874 case OPC_CMPU_LT_OB
:
16876 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16878 case OPC_CMPU_LE_OB
:
16880 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16882 case OPC_PACKRL_PW
:
16884 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16888 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16892 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16896 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16904 tcg_temp_free(v1_t
);
16905 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
    TCGv t0;

    check_dspr2(ctx);

    if (rt == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    switch (op1) {
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
        case OPC_APPEND:
            if (sa != 0) {
                tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            }
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        case OPC_PREPEND:
            if (sa != 0) {
                tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
                tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
                tcg_gen_shli_tl(t0, t0, 32 - sa);
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        case OPC_BALIGN:
            sa &= 3;
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
        case OPC_DAPPEND:
            if (sa != 0) {
                tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            }
            break;
        case OPC_PREPENDD:
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            break;
        case OPC_PREPENDW:
            if (sa != 0) {
                tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
                tcg_gen_shli_tl(t0, t0, 64 - sa);
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            break;
        case OPC_DBALIGN:
            sa &= 7;
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    }
    tcg_temp_free(t0);
}
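
/*
 * BALIGN/DBALIGN above implement a byte-granular concatenation:
 * rt becomes (rt << 8 * sa) | (rs >> 8 * (4 - sa)) in the 32-bit form
 * (8 - sa for the 64-bit form).  For example, with sa == 1 the result
 * keeps the low three bytes of rt shifted up and takes its lowest byte
 * from the top byte of rs.  The guards skip code generation for sa
 * values where the operation degenerates (sa == 0 leaves rt unchanged),
 * which is presumably why only the remaining shift amounts are handled.
 */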
16996 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16997 int ret
, int v1
, int v2
, int check_ret
)
17006 if ((ret
== 0) && (check_ret
== 1)) {
17007 /* Treat as NOP. */
17011 t0
= tcg_temp_new();
17012 t1
= tcg_temp_new();
17013 v1_t
= tcg_temp_new();
17014 v2_t
= tcg_temp_new();
17016 gen_load_gpr(v1_t
, v1
);
17017 gen_load_gpr(v2_t
, v2
);
17020 case OPC_EXTR_W_DSP
:
17024 tcg_gen_movi_tl(t0
, v2
);
17025 tcg_gen_movi_tl(t1
, v1
);
17026 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17029 tcg_gen_movi_tl(t0
, v2
);
17030 tcg_gen_movi_tl(t1
, v1
);
17031 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17033 case OPC_EXTR_RS_W
:
17034 tcg_gen_movi_tl(t0
, v2
);
17035 tcg_gen_movi_tl(t1
, v1
);
17036 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17039 tcg_gen_movi_tl(t0
, v2
);
17040 tcg_gen_movi_tl(t1
, v1
);
17041 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17043 case OPC_EXTRV_S_H
:
17044 tcg_gen_movi_tl(t0
, v2
);
17045 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17048 tcg_gen_movi_tl(t0
, v2
);
17049 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17051 case OPC_EXTRV_R_W
:
17052 tcg_gen_movi_tl(t0
, v2
);
17053 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17055 case OPC_EXTRV_RS_W
:
17056 tcg_gen_movi_tl(t0
, v2
);
17057 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17060 tcg_gen_movi_tl(t0
, v2
);
17061 tcg_gen_movi_tl(t1
, v1
);
17062 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17065 tcg_gen_movi_tl(t0
, v2
);
17066 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17069 tcg_gen_movi_tl(t0
, v2
);
17070 tcg_gen_movi_tl(t1
, v1
);
17071 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17074 tcg_gen_movi_tl(t0
, v2
);
17075 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17078 imm
= (ctx
->opcode
>> 20) & 0x3F;
17079 tcg_gen_movi_tl(t0
, ret
);
17080 tcg_gen_movi_tl(t1
, imm
);
17081 gen_helper_shilo(t0
, t1
, cpu_env
);
17084 tcg_gen_movi_tl(t0
, ret
);
17085 gen_helper_shilo(t0
, v1_t
, cpu_env
);
17088 tcg_gen_movi_tl(t0
, ret
);
17089 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
17092 imm
= (ctx
->opcode
>> 11) & 0x3FF;
17093 tcg_gen_movi_tl(t0
, imm
);
17094 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
17097 imm
= (ctx
->opcode
>> 16) & 0x03FF;
17098 tcg_gen_movi_tl(t0
, imm
);
17099 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
17103 #ifdef TARGET_MIPS64
17104 case OPC_DEXTR_W_DSP
:
17108 tcg_gen_movi_tl(t0
, ret
);
17109 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
17113 int shift
= (ctx
->opcode
>> 19) & 0x7F;
17114 int ac
= (ctx
->opcode
>> 11) & 0x03;
17115 tcg_gen_movi_tl(t0
, shift
);
17116 tcg_gen_movi_tl(t1
, ac
);
17117 gen_helper_dshilo(t0
, t1
, cpu_env
);
17122 int ac
= (ctx
->opcode
>> 11) & 0x03;
17123 tcg_gen_movi_tl(t0
, ac
);
17124 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
17128 tcg_gen_movi_tl(t0
, v2
);
17129 tcg_gen_movi_tl(t1
, v1
);
17131 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17134 tcg_gen_movi_tl(t0
, v2
);
17135 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17138 tcg_gen_movi_tl(t0
, v2
);
17139 tcg_gen_movi_tl(t1
, v1
);
17140 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17143 tcg_gen_movi_tl(t0
, v2
);
17144 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17147 tcg_gen_movi_tl(t0
, v2
);
17148 tcg_gen_movi_tl(t1
, v1
);
17149 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17151 case OPC_DEXTR_R_L
:
17152 tcg_gen_movi_tl(t0
, v2
);
17153 tcg_gen_movi_tl(t1
, v1
);
17154 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17156 case OPC_DEXTR_RS_L
:
17157 tcg_gen_movi_tl(t0
, v2
);
17158 tcg_gen_movi_tl(t1
, v1
);
17159 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17162 tcg_gen_movi_tl(t0
, v2
);
17163 tcg_gen_movi_tl(t1
, v1
);
17164 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17166 case OPC_DEXTR_R_W
:
17167 tcg_gen_movi_tl(t0
, v2
);
17168 tcg_gen_movi_tl(t1
, v1
);
17169 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17171 case OPC_DEXTR_RS_W
:
17172 tcg_gen_movi_tl(t0
, v2
);
17173 tcg_gen_movi_tl(t1
, v1
);
17174 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17176 case OPC_DEXTR_S_H
:
17177 tcg_gen_movi_tl(t0
, v2
);
17178 tcg_gen_movi_tl(t1
, v1
);
17179 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17181 case OPC_DEXTRV_S_H
:
17182 tcg_gen_movi_tl(t0
, v2
);
17183 tcg_gen_movi_tl(t1
, v1
);
17184 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17187 tcg_gen_movi_tl(t0
, v2
);
17188 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17190 case OPC_DEXTRV_R_L
:
17191 tcg_gen_movi_tl(t0
, v2
);
17192 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17194 case OPC_DEXTRV_RS_L
:
17195 tcg_gen_movi_tl(t0
, v2
);
17196 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17199 tcg_gen_movi_tl(t0
, v2
);
17200 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17202 case OPC_DEXTRV_R_W
:
17203 tcg_gen_movi_tl(t0
, v2
);
17204 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17206 case OPC_DEXTRV_RS_W
:
17207 tcg_gen_movi_tl(t0
, v2
);
17208 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17217 tcg_temp_free(v1_t
);
17218 tcg_temp_free(v2_t
);
17221 /* End MIPSDSP functions. */
17223 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17225 int rs
, rt
, rd
, sa
;
17228 rs
= (ctx
->opcode
>> 21) & 0x1f;
17229 rt
= (ctx
->opcode
>> 16) & 0x1f;
17230 rd
= (ctx
->opcode
>> 11) & 0x1f;
17231 sa
= (ctx
->opcode
>> 6) & 0x1f;
17233 op1
= MASK_SPECIAL(ctx
->opcode
);
17236 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17238 case OPC_MULT
... OPC_DIVU
:
17239 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17249 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17252 MIPS_INVAL("special_r6 muldiv");
17253 generate_exception_end(ctx
, EXCP_RI
);
17259 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17263 if (rt
== 0 && sa
== 1) {
17264 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17265 We need additionally to check other fields */
17266 gen_cl(ctx
, op1
, rd
, rs
);
17268 generate_exception_end(ctx
, EXCP_RI
);
17272 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17273 gen_helper_do_semihosting(cpu_env
);
17275 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17276 generate_exception_end(ctx
, EXCP_RI
);
17278 generate_exception_end(ctx
, EXCP_DBp
);
17282 #if defined(TARGET_MIPS64)
17284 check_mips_64(ctx
);
17285 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17289 if (rt
== 0 && sa
== 1) {
17290 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17291 We need additionally to check other fields */
17292 check_mips_64(ctx
);
17293 gen_cl(ctx
, op1
, rd
, rs
);
17295 generate_exception_end(ctx
, EXCP_RI
);
17298 case OPC_DMULT
... OPC_DDIVU
:
17299 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17309 check_mips_64(ctx
);
17310 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17313 MIPS_INVAL("special_r6 muldiv");
17314 generate_exception_end(ctx
, EXCP_RI
);
17319 default: /* Invalid */
17320 MIPS_INVAL("special_r6");
17321 generate_exception_end(ctx
, EXCP_RI
);
17326 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17328 int rs
, rt
, rd
, sa
;
17331 rs
= (ctx
->opcode
>> 21) & 0x1f;
17332 rt
= (ctx
->opcode
>> 16) & 0x1f;
17333 rd
= (ctx
->opcode
>> 11) & 0x1f;
17334 sa
= (ctx
->opcode
>> 6) & 0x1f;
17336 op1
= MASK_SPECIAL(ctx
->opcode
);
17338 case OPC_MOVN
: /* Conditional move */
17340 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17341 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17342 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17344 case OPC_MFHI
: /* Move from HI/LO */
17346 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17349 case OPC_MTLO
: /* Move to HI/LO */
17350 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17353 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17354 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17355 check_cp1_enabled(ctx
);
17356 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17357 (ctx
->opcode
>> 16) & 1);
17359 generate_exception_err(ctx
, EXCP_CpU
, 1);
17365 check_insn(ctx
, INSN_VR54XX
);
17366 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17367 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17369 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17374 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17376 #if defined(TARGET_MIPS64)
17377 case OPC_DMULT
... OPC_DDIVU
:
17378 check_insn(ctx
, ISA_MIPS3
);
17379 check_mips_64(ctx
);
17380 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17384 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17387 #ifdef MIPS_STRICT_STANDARD
17388 MIPS_INVAL("SPIM");
17389 generate_exception_end(ctx
, EXCP_RI
);
17391 /* Implemented as RI exception for now. */
17392 MIPS_INVAL("spim (unofficial)");
17393 generate_exception_end(ctx
, EXCP_RI
);
17396 default: /* Invalid */
17397 MIPS_INVAL("special_legacy");
17398 generate_exception_end(ctx
, EXCP_RI
);
17403 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17405 int rs
, rt
, rd
, sa
;
17408 rs
= (ctx
->opcode
>> 21) & 0x1f;
17409 rt
= (ctx
->opcode
>> 16) & 0x1f;
17410 rd
= (ctx
->opcode
>> 11) & 0x1f;
17411 sa
= (ctx
->opcode
>> 6) & 0x1f;
17413 op1
= MASK_SPECIAL(ctx
->opcode
);
17415 case OPC_SLL
: /* Shift with immediate */
17416 if (sa
== 5 && rd
== 0 &&
17417 rs
== 0 && rt
== 0) { /* PAUSE */
17418 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17419 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17420 generate_exception_end(ctx
, EXCP_RI
);
17426 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17429 switch ((ctx
->opcode
>> 21) & 0x1f) {
17431 /* rotr is decoded as srl on non-R2 CPUs */
17432 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17437 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17440 generate_exception_end(ctx
, EXCP_RI
);
17444 case OPC_ADD
... OPC_SUBU
:
17445 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17447 case OPC_SLLV
: /* Shifts */
17449 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17452 switch ((ctx
->opcode
>> 6) & 0x1f) {
17454 /* rotrv is decoded as srlv on non-R2 CPUs */
17455 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17460 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17463 generate_exception_end(ctx
, EXCP_RI
);
17467 case OPC_SLT
: /* Set on less than */
17469 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17471 case OPC_AND
: /* Logic*/
17475 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17478 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17480 case OPC_TGE
... OPC_TEQ
: /* Traps */
17482 check_insn(ctx
, ISA_MIPS2
);
17483 gen_trap(ctx
, op1
, rs
, rt
, -1);
17485 case OPC_LSA
: /* OPC_PMON */
17486 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17487 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17488 decode_opc_special_r6(env
, ctx
);
17490 /* Pmon entry point, also R4010 selsl */
17491 #ifdef MIPS_STRICT_STANDARD
17492 MIPS_INVAL("PMON / selsl");
17493 generate_exception_end(ctx
, EXCP_RI
);
17495 gen_helper_0e0i(pmon
, sa
);
17500 generate_exception_end(ctx
, EXCP_SYSCALL
);
17503 generate_exception_end(ctx
, EXCP_BREAK
);
17506 check_insn(ctx
, ISA_MIPS2
);
17507 gen_sync(extract32(ctx
->opcode
, 6, 5));
17510 #if defined(TARGET_MIPS64)
17511 /* MIPS64 specific opcodes */
17516 check_insn(ctx
, ISA_MIPS3
);
17517 check_mips_64(ctx
);
17518 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17521 switch ((ctx
->opcode
>> 21) & 0x1f) {
17523 /* drotr is decoded as dsrl on non-R2 CPUs */
17524 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17529 check_insn(ctx
, ISA_MIPS3
);
17530 check_mips_64(ctx
);
17531 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17534 generate_exception_end(ctx
, EXCP_RI
);
17539 switch ((ctx
->opcode
>> 21) & 0x1f) {
17541 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17542 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17547 check_insn(ctx
, ISA_MIPS3
);
17548 check_mips_64(ctx
);
17549 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17552 generate_exception_end(ctx
, EXCP_RI
);
17556 case OPC_DADD
... OPC_DSUBU
:
17557 check_insn(ctx
, ISA_MIPS3
);
17558 check_mips_64(ctx
);
17559 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17563 check_insn(ctx
, ISA_MIPS3
);
17564 check_mips_64(ctx
);
17565 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17568 switch ((ctx
->opcode
>> 6) & 0x1f) {
17570 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17571 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17576 check_insn(ctx
, ISA_MIPS3
);
17577 check_mips_64(ctx
);
17578 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17581 generate_exception_end(ctx
, EXCP_RI
);
17586 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17587 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17588 decode_opc_special_r6(env
, ctx
);
17593 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17594 decode_opc_special_r6(env
, ctx
);
17596 decode_opc_special_legacy(env
, ctx
);
17601 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17606 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17608 rs
= (ctx
->opcode
>> 21) & 0x1f;
17609 rt
= (ctx
->opcode
>> 16) & 0x1f;
17610 rd
= (ctx
->opcode
>> 11) & 0x1f;
17612 op1
= MASK_SPECIAL2(ctx
->opcode
);
17614 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17615 case OPC_MSUB
... OPC_MSUBU
:
17616 check_insn(ctx
, ISA_MIPS32
);
17617 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17620 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17623 case OPC_DIVU_G_2F
:
17624 case OPC_MULT_G_2F
:
17625 case OPC_MULTU_G_2F
:
17627 case OPC_MODU_G_2F
:
17628 check_insn(ctx
, INSN_LOONGSON2F
);
17629 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17633 check_insn(ctx
, ISA_MIPS32
);
17634 gen_cl(ctx
, op1
, rd
, rs
);
17637 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17638 gen_helper_do_semihosting(cpu_env
);
17640 /* XXX: not clear which exception should be raised
17641 * when in debug mode...
17643 check_insn(ctx
, ISA_MIPS32
);
17644 generate_exception_end(ctx
, EXCP_DBp
);
17647 #if defined(TARGET_MIPS64)
17650 check_insn(ctx
, ISA_MIPS64
);
17651 check_mips_64(ctx
);
17652 gen_cl(ctx
, op1
, rd
, rs
);
17654 case OPC_DMULT_G_2F
:
17655 case OPC_DMULTU_G_2F
:
17656 case OPC_DDIV_G_2F
:
17657 case OPC_DDIVU_G_2F
:
17658 case OPC_DMOD_G_2F
:
17659 case OPC_DMODU_G_2F
:
17660 check_insn(ctx
, INSN_LOONGSON2F
);
17661 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17664 default: /* Invalid */
17665 MIPS_INVAL("special2_legacy");
17666 generate_exception_end(ctx
, EXCP_RI
);
17671 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17673 int rs
, rt
, rd
, sa
;
17677 rs
= (ctx
->opcode
>> 21) & 0x1f;
17678 rt
= (ctx
->opcode
>> 16) & 0x1f;
17679 rd
= (ctx
->opcode
>> 11) & 0x1f;
17680 sa
= (ctx
->opcode
>> 6) & 0x1f;
17681 imm
= (int16_t)ctx
->opcode
>> 7;
17683 op1
= MASK_SPECIAL3(ctx
->opcode
);
17687 /* hint codes 24-31 are reserved and signal RI */
17688 generate_exception_end(ctx
, EXCP_RI
);
17690 /* Treat as NOP. */
17693 check_cp0_enabled(ctx
);
17694 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17695 gen_cache_operation(ctx
, rt
, rs
, imm
);
17699 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17702 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17707 /* Treat as NOP. */
17710 op2
= MASK_BSHFL(ctx
->opcode
);
17712 case OPC_ALIGN
... OPC_ALIGN_END
:
17713 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17716 gen_bitswap(ctx
, op2
, rd
, rt
);
17721 #if defined(TARGET_MIPS64)
17723 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17726 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17729 check_mips_64(ctx
);
17732 /* Treat as NOP. */
17735 op2
= MASK_DBSHFL(ctx
->opcode
);
17737 case OPC_DALIGN
... OPC_DALIGN_END
:
17738 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17741 gen_bitswap(ctx
, op2
, rd
, rt
);
17748 default: /* Invalid */
17749 MIPS_INVAL("special3_r6");
17750 generate_exception_end(ctx
, EXCP_RI
);
17755 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17760 rs
= (ctx
->opcode
>> 21) & 0x1f;
17761 rt
= (ctx
->opcode
>> 16) & 0x1f;
17762 rd
= (ctx
->opcode
>> 11) & 0x1f;
17764 op1
= MASK_SPECIAL3(ctx
->opcode
);
17766 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17767 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17768 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17769 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17770 * the same mask and op1. */
17771 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17772 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17775 case OPC_ADDUH_R_QB
:
17777 case OPC_ADDQH_R_PH
:
17779 case OPC_ADDQH_R_W
:
17781 case OPC_SUBUH_R_QB
:
17783 case OPC_SUBQH_R_PH
:
17785 case OPC_SUBQH_R_W
:
17786 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17791 case OPC_MULQ_RS_W
:
17792 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17795 MIPS_INVAL("MASK ADDUH.QB");
17796 generate_exception_end(ctx
, EXCP_RI
);
17799 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17800 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17802 generate_exception_end(ctx
, EXCP_RI
);
17806 op2
= MASK_LX(ctx
->opcode
);
17808 #if defined(TARGET_MIPS64)
17814 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17816 default: /* Invalid */
17817 MIPS_INVAL("MASK LX");
17818 generate_exception_end(ctx
, EXCP_RI
);
17822 case OPC_ABSQ_S_PH_DSP
:
17823 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17825 case OPC_ABSQ_S_QB
:
17826 case OPC_ABSQ_S_PH
:
17828 case OPC_PRECEQ_W_PHL
:
17829 case OPC_PRECEQ_W_PHR
:
17830 case OPC_PRECEQU_PH_QBL
:
17831 case OPC_PRECEQU_PH_QBR
:
17832 case OPC_PRECEQU_PH_QBLA
:
17833 case OPC_PRECEQU_PH_QBRA
:
17834 case OPC_PRECEU_PH_QBL
:
17835 case OPC_PRECEU_PH_QBR
:
17836 case OPC_PRECEU_PH_QBLA
:
17837 case OPC_PRECEU_PH_QBRA
:
17838 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17845 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17848 MIPS_INVAL("MASK ABSQ_S.PH");
17849 generate_exception_end(ctx
, EXCP_RI
);
17853 case OPC_ADDU_QB_DSP
:
17854 op2
= MASK_ADDU_QB(ctx
->opcode
);
17857 case OPC_ADDQ_S_PH
:
17860 case OPC_ADDU_S_QB
:
17862 case OPC_ADDU_S_PH
:
17864 case OPC_SUBQ_S_PH
:
17867 case OPC_SUBU_S_QB
:
17869 case OPC_SUBU_S_PH
:
17873 case OPC_RADDU_W_QB
:
17874 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17876 case OPC_MULEU_S_PH_QBL
:
17877 case OPC_MULEU_S_PH_QBR
:
17878 case OPC_MULQ_RS_PH
:
17879 case OPC_MULEQ_S_W_PHL
:
17880 case OPC_MULEQ_S_W_PHR
:
17881 case OPC_MULQ_S_PH
:
17882 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17884 default: /* Invalid */
17885 MIPS_INVAL("MASK ADDU.QB");
17886 generate_exception_end(ctx
, EXCP_RI
);
17891 case OPC_CMPU_EQ_QB_DSP
:
17892 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17894 case OPC_PRECR_SRA_PH_W
:
17895 case OPC_PRECR_SRA_R_PH_W
:
17896 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17898 case OPC_PRECR_QB_PH
:
17899 case OPC_PRECRQ_QB_PH
:
17900 case OPC_PRECRQ_PH_W
:
17901 case OPC_PRECRQ_RS_PH_W
:
17902 case OPC_PRECRQU_S_QB_PH
:
17903 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17905 case OPC_CMPU_EQ_QB
:
17906 case OPC_CMPU_LT_QB
:
17907 case OPC_CMPU_LE_QB
:
17908 case OPC_CMP_EQ_PH
:
17909 case OPC_CMP_LT_PH
:
17910 case OPC_CMP_LE_PH
:
17911 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17913 case OPC_CMPGU_EQ_QB
:
17914 case OPC_CMPGU_LT_QB
:
17915 case OPC_CMPGU_LE_QB
:
17916 case OPC_CMPGDU_EQ_QB
:
17917 case OPC_CMPGDU_LT_QB
:
17918 case OPC_CMPGDU_LE_QB
:
17921 case OPC_PACKRL_PH
:
17922 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17924 default: /* Invalid */
17925 MIPS_INVAL("MASK CMPU.EQ.QB");
17926 generate_exception_end(ctx
, EXCP_RI
);
17930 case OPC_SHLL_QB_DSP
:
17931 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17933 case OPC_DPA_W_PH_DSP
:
17934 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17936 case OPC_DPAU_H_QBL
:
17937 case OPC_DPAU_H_QBR
:
17938 case OPC_DPSU_H_QBL
:
17939 case OPC_DPSU_H_QBR
:
17941 case OPC_DPAX_W_PH
:
17942 case OPC_DPAQ_S_W_PH
:
17943 case OPC_DPAQX_S_W_PH
:
17944 case OPC_DPAQX_SA_W_PH
:
17946 case OPC_DPSX_W_PH
:
17947 case OPC_DPSQ_S_W_PH
:
17948 case OPC_DPSQX_S_W_PH
:
17949 case OPC_DPSQX_SA_W_PH
:
17950 case OPC_MULSAQ_S_W_PH
:
17951 case OPC_DPAQ_SA_L_W
:
17952 case OPC_DPSQ_SA_L_W
:
17953 case OPC_MAQ_S_W_PHL
:
17954 case OPC_MAQ_S_W_PHR
:
17955 case OPC_MAQ_SA_W_PHL
:
17956 case OPC_MAQ_SA_W_PHR
:
17957 case OPC_MULSA_W_PH
:
17958 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17960 default: /* Invalid */
17961 MIPS_INVAL("MASK DPAW.PH");
17962 generate_exception_end(ctx
, EXCP_RI
);
17967 op2
= MASK_INSV(ctx
->opcode
);
17978 t0
= tcg_temp_new();
17979 t1
= tcg_temp_new();
17981 gen_load_gpr(t0
, rt
);
17982 gen_load_gpr(t1
, rs
);
17984 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17990 default: /* Invalid */
17991 MIPS_INVAL("MASK INSV");
17992 generate_exception_end(ctx
, EXCP_RI
);
17996 case OPC_APPEND_DSP
:
17997 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17999 case OPC_EXTR_W_DSP
:
18000 op2
= MASK_EXTR_W(ctx
->opcode
);
18004 case OPC_EXTR_RS_W
:
18006 case OPC_EXTRV_S_H
:
18008 case OPC_EXTRV_R_W
:
18009 case OPC_EXTRV_RS_W
:
18014 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18017 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18023 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18025 default: /* Invalid */
18026 MIPS_INVAL("MASK EXTR.W");
18027 generate_exception_end(ctx
, EXCP_RI
);
18031 #if defined(TARGET_MIPS64)
18032 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
18033 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
18034 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
18035 check_insn(ctx
, INSN_LOONGSON2E
);
18036 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
18038 case OPC_ABSQ_S_QH_DSP
:
18039 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
18041 case OPC_PRECEQ_L_PWL
:
18042 case OPC_PRECEQ_L_PWR
:
18043 case OPC_PRECEQ_PW_QHL
:
18044 case OPC_PRECEQ_PW_QHR
:
18045 case OPC_PRECEQ_PW_QHLA
:
18046 case OPC_PRECEQ_PW_QHRA
:
18047 case OPC_PRECEQU_QH_OBL
:
18048 case OPC_PRECEQU_QH_OBR
:
18049 case OPC_PRECEQU_QH_OBLA
:
18050 case OPC_PRECEQU_QH_OBRA
:
18051 case OPC_PRECEU_QH_OBL
:
18052 case OPC_PRECEU_QH_OBR
:
18053 case OPC_PRECEU_QH_OBLA
:
18054 case OPC_PRECEU_QH_OBRA
:
18055 case OPC_ABSQ_S_OB
:
18056 case OPC_ABSQ_S_PW
:
18057 case OPC_ABSQ_S_QH
:
18058 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18066 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18068 default: /* Invalid */
18069 MIPS_INVAL("MASK ABSQ_S.QH");
18070 generate_exception_end(ctx
, EXCP_RI
);
18074 case OPC_ADDU_OB_DSP
:
18075 op2
= MASK_ADDU_OB(ctx
->opcode
);
18077 case OPC_RADDU_L_OB
:
18079 case OPC_SUBQ_S_PW
:
18081 case OPC_SUBQ_S_QH
:
18083 case OPC_SUBU_S_OB
:
18085 case OPC_SUBU_S_QH
:
18087 case OPC_SUBUH_R_OB
:
18089 case OPC_ADDQ_S_PW
:
18091 case OPC_ADDQ_S_QH
:
18093 case OPC_ADDU_S_OB
:
18095 case OPC_ADDU_S_QH
:
18097 case OPC_ADDUH_R_OB
:
18098 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18100 case OPC_MULEQ_S_PW_QHL
:
18101 case OPC_MULEQ_S_PW_QHR
:
18102 case OPC_MULEU_S_QH_OBL
:
18103 case OPC_MULEU_S_QH_OBR
:
18104 case OPC_MULQ_RS_QH
:
18105 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18107 default: /* Invalid */
18108 MIPS_INVAL("MASK ADDU.OB");
18109 generate_exception_end(ctx
, EXCP_RI
);
18113 case OPC_CMPU_EQ_OB_DSP
:
18114 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
18116 case OPC_PRECR_SRA_QH_PW
:
18117 case OPC_PRECR_SRA_R_QH_PW
:
18118 /* Return value is rt. */
18119 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18121 case OPC_PRECR_OB_QH
:
18122 case OPC_PRECRQ_OB_QH
:
18123 case OPC_PRECRQ_PW_L
:
18124 case OPC_PRECRQ_QH_PW
:
18125 case OPC_PRECRQ_RS_QH_PW
:
18126 case OPC_PRECRQU_S_OB_QH
:
18127 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18129 case OPC_CMPU_EQ_OB
:
18130 case OPC_CMPU_LT_OB
:
18131 case OPC_CMPU_LE_OB
:
18132 case OPC_CMP_EQ_QH
:
18133 case OPC_CMP_LT_QH
:
18134 case OPC_CMP_LE_QH
:
18135 case OPC_CMP_EQ_PW
:
18136 case OPC_CMP_LT_PW
:
18137 case OPC_CMP_LE_PW
:
18138 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18140 case OPC_CMPGDU_EQ_OB
:
18141 case OPC_CMPGDU_LT_OB
:
18142 case OPC_CMPGDU_LE_OB
:
18143 case OPC_CMPGU_EQ_OB
:
18144 case OPC_CMPGU_LT_OB
:
18145 case OPC_CMPGU_LE_OB
:
18146 case OPC_PACKRL_PW
:
18150 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18152 default: /* Invalid */
18153 MIPS_INVAL("MASK CMPU_EQ.OB");
18154 generate_exception_end(ctx
, EXCP_RI
);
18158 case OPC_DAPPEND_DSP
:
18159 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18161 case OPC_DEXTR_W_DSP
:
18162 op2
= MASK_DEXTR_W(ctx
->opcode
);
18169 case OPC_DEXTR_R_L
:
18170 case OPC_DEXTR_RS_L
:
18172 case OPC_DEXTR_R_W
:
18173 case OPC_DEXTR_RS_W
:
18174 case OPC_DEXTR_S_H
:
18176 case OPC_DEXTRV_R_L
:
18177 case OPC_DEXTRV_RS_L
:
18178 case OPC_DEXTRV_S_H
:
18180 case OPC_DEXTRV_R_W
:
18181 case OPC_DEXTRV_RS_W
:
18182 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18187 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18189 default: /* Invalid */
18190 MIPS_INVAL("MASK EXTR.W");
18191 generate_exception_end(ctx
, EXCP_RI
);
18195 case OPC_DPAQ_W_QH_DSP
:
18196 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
18198 case OPC_DPAU_H_OBL
:
18199 case OPC_DPAU_H_OBR
:
18200 case OPC_DPSU_H_OBL
:
18201 case OPC_DPSU_H_OBR
:
18203 case OPC_DPAQ_S_W_QH
:
18205 case OPC_DPSQ_S_W_QH
:
18206 case OPC_MULSAQ_S_W_QH
:
18207 case OPC_DPAQ_SA_L_PW
:
18208 case OPC_DPSQ_SA_L_PW
:
18209 case OPC_MULSAQ_S_L_PW
:
18210 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18212 case OPC_MAQ_S_W_QHLL
:
18213 case OPC_MAQ_S_W_QHLR
:
18214 case OPC_MAQ_S_W_QHRL
:
18215 case OPC_MAQ_S_W_QHRR
:
18216 case OPC_MAQ_SA_W_QHLL
:
18217 case OPC_MAQ_SA_W_QHLR
:
18218 case OPC_MAQ_SA_W_QHRL
:
18219 case OPC_MAQ_SA_W_QHRR
:
18220 case OPC_MAQ_S_L_PWL
:
18221 case OPC_MAQ_S_L_PWR
:
18226 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18228 default: /* Invalid */
18229 MIPS_INVAL("MASK DPAQ.W.QH");
18230 generate_exception_end(ctx
, EXCP_RI
);
18234 case OPC_DINSV_DSP
:
18235 op2
= MASK_INSV(ctx
->opcode
);
18246 t0
= tcg_temp_new();
18247 t1
= tcg_temp_new();
18249 gen_load_gpr(t0
, rt
);
18250 gen_load_gpr(t1
, rs
);
18252 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18258 default: /* Invalid */
18259 MIPS_INVAL("MASK DINSV");
18260 generate_exception_end(ctx
, EXCP_RI
);
18264 case OPC_SHLL_OB_DSP
:
18265 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18268 default: /* Invalid */
18269 MIPS_INVAL("special3_legacy");
18270 generate_exception_end(ctx
, EXCP_RI
);
18275 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18277 int rs
, rt
, rd
, sa
;
18281 rs
= (ctx
->opcode
>> 21) & 0x1f;
18282 rt
= (ctx
->opcode
>> 16) & 0x1f;
18283 rd
= (ctx
->opcode
>> 11) & 0x1f;
18284 sa
= (ctx
->opcode
>> 6) & 0x1f;
18285 imm
= sextract32(ctx
->opcode
, 7, 9);
18287 op1
= MASK_SPECIAL3(ctx
->opcode
);
18290 * EVA loads and stores overlap Loongson 2E instructions decoded by
18291 * decode_opc_special3_legacy(), so be careful to allow their decoding when
18296 case OPC_LWLE
... OPC_LWRE
:
18297 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18299 case OPC_LBUE
... OPC_LHUE
:
18300 case OPC_LBE
... OPC_LWE
:
18301 check_cp0_enabled(ctx
);
18302 gen_ld(ctx
, op1
, rt
, rs
, imm
);
18304 case OPC_SWLE
... OPC_SWRE
:
18305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18307 case OPC_SBE
... OPC_SHE
:
18309 check_cp0_enabled(ctx
);
18310 gen_st(ctx
, op1
, rt
, rs
, imm
);
18313 check_cp0_enabled(ctx
);
18314 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
18317 check_cp0_enabled(ctx
);
18318 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
18319 gen_cache_operation(ctx
, rt
, rs
, imm
);
18321 /* Treat as NOP. */
18324 check_cp0_enabled(ctx
);
18325 /* Treat as NOP. */
18333 check_insn(ctx
, ISA_MIPS32R2
);
18334 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18337 op2
= MASK_BSHFL(ctx
->opcode
);
18339 case OPC_ALIGN
... OPC_ALIGN_END
:
18341 check_insn(ctx
, ISA_MIPS32R6
);
18342 decode_opc_special3_r6(env
, ctx
);
18345 check_insn(ctx
, ISA_MIPS32R2
);
18346 gen_bshfl(ctx
, op2
, rt
, rd
);
18350 #if defined(TARGET_MIPS64)
18351 case OPC_DEXTM
... OPC_DEXT
:
18352 case OPC_DINSM
... OPC_DINS
:
18353 check_insn(ctx
, ISA_MIPS64R2
);
18354 check_mips_64(ctx
);
18355 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18358 op2
= MASK_DBSHFL(ctx
->opcode
);
18360 case OPC_DALIGN
... OPC_DALIGN_END
:
18362 check_insn(ctx
, ISA_MIPS32R6
);
18363 decode_opc_special3_r6(env
, ctx
);
18366 check_insn(ctx
, ISA_MIPS64R2
);
18367 check_mips_64(ctx
);
18368 op2
= MASK_DBSHFL(ctx
->opcode
);
18369 gen_bshfl(ctx
, op2
, rt
, rd
);
18375 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18378 check_insn(ctx
, ASE_MT
);
18380 TCGv t0
= tcg_temp_new();
18381 TCGv t1
= tcg_temp_new();
18383 gen_load_gpr(t0
, rt
);
18384 gen_load_gpr(t1
, rs
);
18385 gen_helper_fork(t0
, t1
);
18391 check_insn(ctx
, ASE_MT
);
18393 TCGv t0
= tcg_temp_new();
18395 gen_load_gpr(t0
, rs
);
18396 gen_helper_yield(t0
, cpu_env
, t0
);
18397 gen_store_gpr(t0
, rd
);
18402 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18403 decode_opc_special3_r6(env
, ctx
);
18405 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
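
/*
 * In short: an MSA instruction raises RI when the FPU is enabled but not
 * in 64-bit register mode, raises MSADIS when the CPU implements MSA but
 * it is not currently enabled (MIPS_HFLAG_MSA clear), raises RI on CPUs
 * without MSA at all, and only otherwise returns 1 so the caller may
 * translate the instruction.
 */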
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
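
/*
 * The subtract/andc/and sequence above is the usual "does this word
 * contain a zero element" bit trick, applied to each 64-bit half of the
 * 128-bit register at the element width chosen by df.  For DF_BYTE:
 *
 *     mask = (x - 0x0101010101010101) & ~x & 0x8080808080808080
 *
 * Taking x = 0x11223300445566FF as an illustration, the subtraction
 * borrows through the zero byte, ~x keeps that byte's top bit, and the
 * final AND leaves mask = 0x0000008000000000; the setcond therefore
 * reports "some element is zero".  A value with no zero byte yields
 * mask == 0.
 */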
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
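
/*
 * The code above only computes the branch condition into bcond and the
 * target into ctx->btarget; MIPS_HFLAG_BC together with MIPS_HFLAG_BDS32
 * tells the main translation loop to emit the conditional goto once the
 * 4-byte delay slot has been translated.  The displacement is the usual
 * signed 16-bit offset << 2 relative to the instruction after the
 * branch, e.g. an encoded offset of 0x10 branches 0x44 bytes past the
 * branch itself.
 */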
static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
    case OPC_ANDI_B:
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_ORI_B:
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_NORI_B:
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_XORI_B:
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMNZI_B:
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMZI_B:
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BSELI_B:
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_SHF_B:
    case OPC_SHF_H:
    case OPC_SHF_W:
        {
            uint8_t df = (ctx->opcode >> 24) & 0x3;
            if (df == DF_DOUBLE) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                TCGv_i32 tdf = tcg_const_i32(df);
                gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
                tcg_temp_free_i32(tdf);
            }
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
}
18575 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18577 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18578 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18579 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18580 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18581 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18582 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18584 TCGv_i32 tdf
= tcg_const_i32(df
);
18585 TCGv_i32 twd
= tcg_const_i32(wd
);
18586 TCGv_i32 tws
= tcg_const_i32(ws
);
18587 TCGv_i32 timm
= tcg_temp_new_i32();
18588 tcg_gen_movi_i32(timm
, u5
);
18590 switch (MASK_MSA_I5(ctx
->opcode
)) {
18592 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18595 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18597 case OPC_MAXI_S_df
:
18598 tcg_gen_movi_i32(timm
, s5
);
18599 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18601 case OPC_MAXI_U_df
:
18602 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18604 case OPC_MINI_S_df
:
18605 tcg_gen_movi_i32(timm
, s5
);
18606 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18608 case OPC_MINI_U_df
:
18609 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18612 tcg_gen_movi_i32(timm
, s5
);
18613 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18615 case OPC_CLTI_S_df
:
18616 tcg_gen_movi_i32(timm
, s5
);
18617 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18619 case OPC_CLTI_U_df
:
18620 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18622 case OPC_CLEI_S_df
:
18623 tcg_gen_movi_i32(timm
, s5
);
18624 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18626 case OPC_CLEI_U_df
:
18627 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18631 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18632 tcg_gen_movi_i32(timm
, s10
);
18633 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18637 MIPS_INVAL("MSA instruction");
18638 generate_exception_end(ctx
, EXCP_RI
);
18642 tcg_temp_free_i32(tdf
);
18643 tcg_temp_free_i32(twd
);
18644 tcg_temp_free_i32(tws
);
18645 tcg_temp_free_i32(timm
);
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf;
    TCGv_i32 tm;
    TCGv_i32 twd;
    TCGv_i32 tws;

    if ((dfm & 0x40) == 0x00) {
        m = dfm & 0x3f;
        df = DF_DOUBLE;
    } else if ((dfm & 0x60) == 0x40) {
        m = dfm & 0x1f;
        df = DF_WORD;
    } else if ((dfm & 0x70) == 0x60) {
        m = dfm & 0x0f;
        df = DF_HALF;
    } else if ((dfm & 0x78) == 0x70) {
        m = dfm & 0x7;
        df = DF_BYTE;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    tdf = tcg_const_i32(df);
    tm = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
    case OPC_SLLI_df:
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRAI_df:
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLI_df:
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BCLRI_df:
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BSETI_df:
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BNEGI_df:
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_S_df:
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_U_df:
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRARI_df:
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLRI_df:
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
}
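
/*
 * The df/m decode at the top of gen_msa_bit() above unpacks the 7-bit
 * dfm field, where the data format is encoded by the position of the
 * first 0 bit from the top and the remaining low bits give the bit
 * index m:
 *
 *     0mmmmmm -> DF_DOUBLE, m = dfm & 0x3f
 *     10mmmmm -> DF_WORD,   m = dfm & 0x1f
 *     110mmmm -> DF_HALF,   m = dfm & 0x0f
 *     1110mmm -> DF_BYTE,   m = dfm & 0x07
 *
 * For example dfm = 0x65 (binary 1100101) selects halfword elements
 * with m = 5.
 */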
18732 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18734 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18735 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18736 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18737 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18738 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18740 TCGv_i32 tdf
= tcg_const_i32(df
);
18741 TCGv_i32 twd
= tcg_const_i32(wd
);
18742 TCGv_i32 tws
= tcg_const_i32(ws
);
18743 TCGv_i32 twt
= tcg_const_i32(wt
);
18745 switch (MASK_MSA_3R(ctx
->opcode
)) {
18747 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18750 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18753 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18756 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18758 case OPC_SUBS_S_df
:
18759 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18762 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18765 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18768 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18771 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18774 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18776 case OPC_ADDS_A_df
:
18777 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18779 case OPC_SUBS_U_df
:
18780 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18783 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18786 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18789 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18792 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18795 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18798 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18800 case OPC_ADDS_S_df
:
18801 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18803 case OPC_SUBSUS_U_df
:
18804 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18807 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18822 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18824 case OPC_ADDS_U_df
:
18825 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18827 case OPC_SUBSUU_S_df
:
18828 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18834 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18840 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18843 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18845 case OPC_ASUB_S_df
:
18846 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18849 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18852 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18858 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18861 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18864 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18866 case OPC_ASUB_U_df
:
18867 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18870 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18873 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18879 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18881 case OPC_AVER_S_df
:
18882 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18885 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18888 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18891 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18894 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18896 case OPC_AVER_U_df
:
18897 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18900 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18903 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18906 case OPC_DOTP_S_df
:
18907 case OPC_DOTP_U_df
:
18908 case OPC_DPADD_S_df
:
18909 case OPC_DPADD_U_df
:
18910 case OPC_DPSUB_S_df
:
18911 case OPC_HADD_S_df
:
18912 case OPC_DPSUB_U_df
:
18913 case OPC_HADD_U_df
:
18914 case OPC_HSUB_S_df
:
18915 case OPC_HSUB_U_df
:
18916 if (df
== DF_BYTE
) {
18917 generate_exception_end(ctx
, EXCP_RI
);
18920 switch (MASK_MSA_3R(ctx
->opcode
)) {
18921 case OPC_DOTP_S_df
:
18922 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18924 case OPC_DOTP_U_df
:
18925 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18927 case OPC_DPADD_S_df
:
18928 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18930 case OPC_DPADD_U_df
:
18931 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18933 case OPC_DPSUB_S_df
:
18934 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18936 case OPC_HADD_S_df
:
18937 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18939 case OPC_DPSUB_U_df
:
18940 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18942 case OPC_HADD_U_df
:
18943 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18945 case OPC_HSUB_S_df
:
18946 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18948 case OPC_HSUB_U_df
:
18949 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18954 MIPS_INVAL("MSA instruction");
18955 generate_exception_end(ctx
, EXCP_RI
);
18958 tcg_temp_free_i32(twd
);
18959 tcg_temp_free_i32(tws
);
18960 tcg_temp_free_i32(twt
);
18961 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
18996 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18999 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19000 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19001 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19003 TCGv_i32 tws
= tcg_const_i32(ws
);
19004 TCGv_i32 twd
= tcg_const_i32(wd
);
19005 TCGv_i32 tn
= tcg_const_i32(n
);
19006 TCGv_i32 tdf
= tcg_const_i32(df
);
19008 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19010 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
19012 case OPC_SPLATI_df
:
19013 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
19016 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
19018 case OPC_COPY_S_df
:
19019 case OPC_COPY_U_df
:
19020 case OPC_INSERT_df
:
19021 #if !defined(TARGET_MIPS64)
19022 /* Double format valid only for MIPS64 */
19023 if (df
== DF_DOUBLE
) {
19024 generate_exception_end(ctx
, EXCP_RI
);
19028 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19029 case OPC_COPY_S_df
:
19030 if (likely(wd
!= 0)) {
19031 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
19034 case OPC_COPY_U_df
:
19035 if (likely(wd
!= 0)) {
19036 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
19039 case OPC_INSERT_df
:
19040 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
19045 MIPS_INVAL("MSA instruction");
19046 generate_exception_end(ctx
, EXCP_RI
);
19048 tcg_temp_free_i32(twd
);
19049 tcg_temp_free_i32(tws
);
19050 tcg_temp_free_i32(tn
);
19051 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
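
/*
 * gen_msa_elm() above uses the same leading-ones trick on the 6-bit
 * df/n field to recover the data format and element index n:
 *
 *     00nnnn -> DF_BYTE,   n = dfn & 0x0f
 *     100nnn -> DF_HALF,   n = dfn & 0x07
 *     1100nn -> DF_WORD,   n = dfn & 0x03
 *     11100n -> DF_DOUBLE, n = dfn & 0x01
 *     111110 -> the register-move forms (CTCMSA/CFCMSA/MOVE.V)
 *
 * e.g. dfn = 0x32 (binary 110010) is a word element with n = 2.
 */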
19083 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
#define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t df = (ctx->opcode >> 21) & 0x1;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_temp_new_i32();

    /* adjust df value for floating-point instruction */
    tcg_gen_movi_i32(tdf, df + 2);

    switch (MASK_MSA_3RF(ctx->opcode)) {
    case OPC_FCAF_df:
        gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FADD_df:
        gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUN_df:
        gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUB_df:
        gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCOR_df:
        gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCEQ_df:
        gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMUL_df:
        gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUNE_df:
        gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUEQ_df:
        gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FDIV_df:
        gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCNE_df:
        gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLT_df:
        gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMADD_df:
        gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MUL_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULT_df:
        gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMSUB_df:
        gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADD_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLE_df:
        gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUB_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULE_df:
        gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXP2_df:
        gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSAF_df:
        gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXDO_df:
        gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUN_df:
        gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSOR_df:
        gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSEQ_df:
        gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FTQ_df:
        gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUNE_df:
        gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUEQ_df:
        gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSNE_df:
        gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLT_df:
        gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_df:
        gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MULR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULT_df:
        gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_A_df:
        gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADDR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLE_df:
        gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_df:
        gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUBR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULE_df:
        gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_A_df:
        gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                        (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
    case OPC_FILL_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        break;
    case OPC_PCNT_df:
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLOC_df:
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLZC_df:
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                         (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FSQRT_df:
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRCP_df:
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRINT_df:
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FLOG2_df:
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQL_df:
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQR_df:
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
{
    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
    case OPC_OR_V:
    case OPC_NOR_V:
    case OPC_XOR_V:
    case OPC_BMNZ_V:
    case OPC_BMZ_V:
    case OPC_BSEL_V:
        gen_msa_vec_v(env, ctx);
        break;
    case OPC_MSA_2R:
        gen_msa_2r(env, ctx);
        break;
    case OPC_MSA_2RF:
        gen_msa_2rf(env, ctx);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
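/*
 * MSA vector loads and stores take a signed 10-bit offset that is scaled by
 * the element size before being added to GPR rs (s10 << df above), so the
 * reach grows from about +/-512 bytes for LD.B/ST.B to about +/-4 KiB for
 * LD.D/ST.D.
 */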
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
        decode_opc_special(env, ctx);
        decode_opc_special2_legacy(env, ctx);
        decode_opc_special3(env, ctx);
        op1 = MASK_REGIMM(ctx->opcode);
        case OPC_BLTZL: /* REGIMM branches */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_NAL, OPC_BAL */
                gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                generate_exception_end(ctx, EXCP_RI);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            TCGv t0 = tcg_temp_new();

            op2 = MASK_MFMC0(ctx->opcode);
                check_insn(ctx, ASE_MT);
                gen_helper_dmt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_emt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_dvpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_evpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_dvp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_evp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_di(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->bstate = BS_STOP;
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_ei(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* BS_STOP isn't sufficient, we need to ensure we break out
                   of translated code to check for pending interrupts. */
                gen_save_pc(ctx->pc + 4);
                ctx->bstate = BS_EXCP;
            default: /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception_end(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */

    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);

    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);

#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
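/*
 * The "blikely not taken" block at the top of decode_opc() handles the
 * branch-likely case: when bcond is zero the delay slot must be nullified,
 * so translation clears the branch hflags and chains straight to the
 * fall-through address (pc + 4) instead of executing the slot.
 */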
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
    CPUMIPSState *env = cs->env_ptr;
    target_ulong pc_start;
    target_ulong next_page_start;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx.sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = hflags_mmu_index(ctx.hflags);
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            goto done_generating;

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);

        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {

        if (ctx.pc >= next_page_start) {

        if (tcg_op_buf_full()) {

        if (num_insns >= max_insns)

    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx.bstate) {
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
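/*
 * The translation loop above ends when decoding changes ctx.bstate
 * (exception, forced stop or branch), when the next instruction would cross
 * the target page boundary computed in next_page_start, when the TCG opcode
 * buffer fills up, or after max_insns instructions; single-stepping also
 * breaks after each branch/delay-slot pair so the debugger sees them as one
 * step.
 */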
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp) \
    fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                " fd:%13g fs:%13g psu: %13g\n", \
                (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
                (double)(fp)->fd, \
                (double)(fp)->fs[FP_ENDIAN_IDX], \
                (double)(fp)->fs[!FP_ENDIAN_IDX]); \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu:%13g\n", \
                    tmp.w[FP_ENDIAN_IDX], tmp.d, \
                    (double)tmp.fs[FP_ENDIAN_IDX], \
                    (double)tmp.fs[!FP_ENDIAN_IDX]); \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)

    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
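/*
 * Each architectural FPU register aliases the low 64 bits of the matching
 * 128-bit MSA register, which is why mips_tcg_init() sets
 * fpu_f64[i] = msa_wr_d[i * 2] instead of allocating a separate TCG global
 * for the scalar view.
 */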
void cpu_mips_realize_env(CPUMIPSState *env)
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);

bool cpu_supports_cps_smp(const char *cpu_model)
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;

bool cpu_supports_isa(const char *cpu_model, unsigned int isa)
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);

    return (def->insn_flags & isa) != 0;
, target_ulong address
)
20548 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20549 vp
->env
.exception_base
= address
;
20552 void cpu_state_reset(CPUMIPSState
*env
)
20554 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
20555 CPUState
*cs
= CPU(cpu
);
20557 /* Reset registers to their default values */
20558 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
20559 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
20560 #ifdef TARGET_WORDS_BIGENDIAN
20561 env
->CP0_Config0
|= (1 << CP0C0_BE
);
20563 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
20564 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
20565 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
20566 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
20567 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
20568 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
20569 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
20570 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
20571 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20572 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20573 << env
->cpu_model
->CP0_LLAddr_shift
;
20574 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20575 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20576 env
->CCRes
= env
->cpu_model
->CCRes
;
20577 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20578 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20579 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20580 env
->current_tc
= 0;
20581 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20582 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20583 #if defined(TARGET_MIPS64)
20584 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20585 env
->SEGMask
|= 3ULL << 62;
20588 env
->PABITS
= env
->cpu_model
->PABITS
;
20589 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20590 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20591 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20592 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20593 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20594 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20595 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20596 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20597 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20598 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20599 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20600 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20601 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
20602 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20603 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
20604 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20605 env
->msair
= env
->cpu_model
->MSAIR
;
20606 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20608 #if defined(CONFIG_USER_ONLY)
20609 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20610 # ifdef TARGET_MIPS64
20611 /* Enable 64-bit register mode. */
20612 env
->CP0_Status
|= (1 << CP0St_PX
);
20614 # ifdef TARGET_ABI_MIPSN64
20615 /* Enable 64-bit address mode. */
20616 env
->CP0_Status
|= (1 << CP0St_UX
);
20618 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20619 hardware registers. */
20620 env
->CP0_HWREna
|= 0x0000000F;
20621 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20622 env
->CP0_Status
|= (1 << CP0St_CU1
);
20624 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20625 env
->CP0_Status
|= (1 << CP0St_MX
);
20627 # if defined(TARGET_MIPS64)
20628 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20629 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20630 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20631 env
->CP0_Status
|= (1 << CP0St_FR
);
20635 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20636 /* If the exception was raised from a delay slot,
20637 come back to the jump. */
20638 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20639 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20641 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20643 env
->active_tc
.PC
= env
->exception_base
;
20644 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20645 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20646 env
->CP0_Wired
= 0;
20647 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20648 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20649 if (mips_um_ksegs_enabled()) {
20650 env
->CP0_EBase
|= 0x40000000;
20652 env
->CP0_EBase
|= (int32_t)0x80000000;
20654 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20655 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20657 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
20659 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20660 /* vectored interrupts not implemented, timer on int 7,
20661 no performance counters. */
20662 env
->CP0_IntCtl
= 0xe0000000;
20666 for (i
= 0; i
< 7; i
++) {
20667 env
->CP0_WatchLo
[i
] = 0;
20668 env
->CP0_WatchHi
[i
] = 0x80000000;
20670 env
->CP0_WatchLo
[7] = 0;
20671 env
->CP0_WatchHi
[7] = 0;
20673 /* Count register increments in debug mode, EJTAG version 1 */
20674 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20676 cpu_mips_store_count(env
, 1);
20678 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20681 /* Only TC0 on VPE 0 starts as active. */
20682 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20683 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20684 env
->tcs
[i
].CP0_TCHalt
= 1;
20686 env
->active_tc
.CP0_TCHalt
= 1;
20689 if (cs
->cpu_index
== 0) {
20690 /* VPE0 starts up enabled. */
20691 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20692 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20694 /* TC0 starts up unhalted. */
20696 env
->active_tc
.CP0_TCHalt
= 0;
20697 env
->tcs
[0].CP0_TCHalt
= 0;
20698 /* With thread 0 active. */
20699 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20700 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20705 * Configure default legacy segmentation control. We use this regardless of
20706 * whether segmentation control is presented to the guest.
20708 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
20709 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
20710 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
20711 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
20712 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
20713 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20715 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
20716 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20717 (3 << CP0SC_C
)) << 16;
20718 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
20719 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20720 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
20721 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
20722 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20723 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
20724 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
20725 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
20727 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20728 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20729 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20730 env
->CP0_Status
|= (1 << CP0St_FR
);
20734 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20738 compute_hflags(env
);
20739 restore_fp_status(env
);
20740 restore_pamask(env
);
20741 cs
->exception_index
= EXCP_NONE
;
20743 if (semihosting_get_argc()) {
20744 /* UHI interface can be used to obtain argc and argv */
20745 env
->active_tc
.gpr
[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];
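/*
 * The data[] values consumed here mirror what gen_intermediate_code() emits
 * through tcg_gen_insn_start(): data[0] is the instruction PC, data[1] the
 * branch-related hflags bits and data[2] the pending branch target.
 */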