2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
/* Extract the SPECIAL-class discriminator: major opcode plus the 6-bit
 * function field.  The whole expansion is parenthesized so the macro is
 * safe to use inside any larger expression (precedence hazard otherwise). */
#define MASK_SPECIAL(op)            (MASK_OP_MAJOR(op) | (op & 0x3F))
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
/* Discriminator for VR54xx multiply variants: SPECIAL function field plus
 * the 5-bit sa field (bits 10..6).  Fully parenthesized so the expansion
 * cannot misparse when combined with other operators at the call site. */
#define MASK_MUL_VR54XX(op)         (MASK_SPECIAL(op) | (op & (0x1F << 6)))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
/* Discriminator for REGIMM-class instructions: major opcode plus the
 * 5-bit rt field (bits 20..16).  Parenthesized as a whole so the macro
 * is safe in any expression context. */
#define MASK_REGIMM(op)             (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
/* Extract the SPECIAL2-class discriminator: major opcode plus the 6-bit
 * function field.  Outer parentheses guard against operator-precedence
 * surprises at the use site. */
#define MASK_SPECIAL2(op)           (MASK_OP_MAJOR(op) | (op & 0x3F))
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
/* Extract the SPECIAL3-class discriminator: major opcode plus the 6-bit
 * function field.  Outer parentheses guard against operator-precedence
 * surprises at the use site. */
#define MASK_SPECIAL3(op)           (MASK_OP_MAJOR(op) | (op & 0x3F))
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* Discriminator for BSHFL sub-opcodes: SPECIAL3 function field plus the
 * 5-bit sa field (bits 10..6).  Fully parenthesized for safe expansion
 * inside larger expressions. */
#define MASK_BSHFL(op)              (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* Discriminator for DBSHFL sub-opcodes: SPECIAL3 function field plus the
 * 5-bit sa field (bits 10..6).  Fully parenthesized for safe expansion
 * inside larger expressions. */
#define MASK_DBSHFL(op)             (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
/* Coprocessor 0 decode mask: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_CP0(op)       (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
906 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
/* MFMC0 decode mask: CP0 mask plus the low 16 bits.
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_MFMC0(op)     (MASK_CP0(op) | (op & 0xFFFF))
913 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
914 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
915 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
916 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
917 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
918 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
919 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
920 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
923 /* Coprocessor 0 (with rs == C0) */
/* Coprocessor 0 decode mask when rs == C0: CP0 mask plus the funct field.
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_C0(op)        (MASK_CP0(op) | (op & 0x3F))
927 OPC_TLBR
= 0x01 | OPC_C0
,
928 OPC_TLBWI
= 0x02 | OPC_C0
,
929 OPC_TLBINV
= 0x03 | OPC_C0
,
930 OPC_TLBINVF
= 0x04 | OPC_C0
,
931 OPC_TLBWR
= 0x06 | OPC_C0
,
932 OPC_TLBP
= 0x08 | OPC_C0
,
933 OPC_RFE
= 0x10 | OPC_C0
,
934 OPC_ERET
= 0x18 | OPC_C0
,
935 OPC_DERET
= 0x1F | OPC_C0
,
936 OPC_WAIT
= 0x20 | OPC_C0
,
939 /* Coprocessor 1 (rs field) */
/* Coprocessor 1 decode mask: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_CP1(op)       (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
942 /* Values for the fmt field in FP instructions */
944 /* 0 - 15 are reserved */
945 FMT_S
= 16, /* single fp */
946 FMT_D
= 17, /* double fp */
947 FMT_E
= 18, /* extended fp */
948 FMT_Q
= 19, /* quad fp */
949 FMT_W
= 20, /* 32-bit fixed */
950 FMT_L
= 21, /* 64-bit fixed */
951 FMT_PS
= 22, /* paired single fp */
952 /* 23 - 31 are reserved */
956 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
957 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
958 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
959 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
960 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
961 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
962 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
963 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
964 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
965 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
966 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
967 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
968 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
969 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
970 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
971 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
972 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
973 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
974 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
975 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
976 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
977 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
978 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
979 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
980 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
981 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
982 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
983 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
984 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
985 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/* CP1 decode mask including the funct field (bits 5..0).
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_CP1_FUNC(op)  (MASK_CP1(op) | (op & 0x3F))
/* BC1 decode mask including the nd/tf bits (17..16).
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_BC1(op)       (MASK_CP1(op) | (op & (0x3 << 16)))
992 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
993 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
994 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
995 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
999 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1000 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1004 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1005 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/* Coprocessor 2 decode mask: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_CP2(op)       (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1011 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1012 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1013 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1014 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1015 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1016 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1017 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1018 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1019 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1020 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1021 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1024 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1027 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1028 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1029 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1030 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1031 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1032 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1033 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1034 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1036 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1037 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1038 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1039 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1040 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1041 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1042 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1043 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1045 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1046 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1047 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1048 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1049 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1050 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1051 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1052 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1054 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1055 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1056 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1057 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1058 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1059 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1060 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1061 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1063 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1064 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1065 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1066 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1067 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1068 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1070 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1071 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1072 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1073 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1074 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1075 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1077 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1078 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1079 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1080 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1081 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1082 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1084 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1085 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1086 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1087 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1088 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1089 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1091 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1092 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1093 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1094 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1095 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1096 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1098 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1099 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1100 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1101 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1102 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1103 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1105 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1106 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1107 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1108 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1109 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1110 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1112 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1113 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1114 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1115 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1116 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1117 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/* Coprocessor 3 (COP1X) decode mask: major opcode plus the funct field.
 * Fully parenthesized so the macro is safe inside larger expressions. */
#define MASK_CP3(op)       (MASK_OP_MAJOR(op) | (op & 0x3F))
1124 OPC_LWXC1
= 0x00 | OPC_CP3
,
1125 OPC_LDXC1
= 0x01 | OPC_CP3
,
1126 OPC_LUXC1
= 0x05 | OPC_CP3
,
1127 OPC_SWXC1
= 0x08 | OPC_CP3
,
1128 OPC_SDXC1
= 0x09 | OPC_CP3
,
1129 OPC_SUXC1
= 0x0D | OPC_CP3
,
1130 OPC_PREFX
= 0x0F | OPC_CP3
,
1131 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1132 OPC_MADD_S
= 0x20 | OPC_CP3
,
1133 OPC_MADD_D
= 0x21 | OPC_CP3
,
1134 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1135 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1136 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1137 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1138 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1139 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1140 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1141 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1142 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1143 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1147 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1149 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1150 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1151 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1152 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1153 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1154 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1155 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1156 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1157 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1158 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1159 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1160 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1161 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1162 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1163 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1164 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1165 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1166 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1167 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1168 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1169 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1171 /* MI10 instruction */
1172 OPC_LD_B
= (0x20) | OPC_MSA
,
1173 OPC_LD_H
= (0x21) | OPC_MSA
,
1174 OPC_LD_W
= (0x22) | OPC_MSA
,
1175 OPC_LD_D
= (0x23) | OPC_MSA
,
1176 OPC_ST_B
= (0x24) | OPC_MSA
,
1177 OPC_ST_H
= (0x25) | OPC_MSA
,
1178 OPC_ST_W
= (0x26) | OPC_MSA
,
1179 OPC_ST_D
= (0x27) | OPC_MSA
,
1183 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1184 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1185 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1186 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1187 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1188 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1189 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1190 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1191 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1192 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1193 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1194 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1195 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1197 /* I8 instruction */
1198 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1199 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1200 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1201 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1202 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1203 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1204 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1205 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1206 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1207 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1209 /* VEC/2R/2RF instruction */
1210 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1211 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1212 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1213 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1214 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1215 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1216 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1218 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1219 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1221 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1222 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1223 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1224 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1225 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1227 /* 2RF instruction df(bit 16) = _w, _d */
1228 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1229 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1230 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1231 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1232 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1233 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1234 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1235 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1236 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1237 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1238 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1239 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1240 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1241 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1242 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1243 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1245 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1246 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1247 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1248 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1249 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1250 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1251 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1252 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1253 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1254 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1255 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1256 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1257 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1258 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1259 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1260 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1261 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1262 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1263 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1264 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1265 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1266 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1267 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1268 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1269 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1270 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1271 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1272 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1273 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1274 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1275 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1276 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1277 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1278 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1279 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1280 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1281 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1282 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1283 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1284 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1285 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1286 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1287 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1288 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1289 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1290 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1291 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1292 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1293 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1294 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1295 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1296 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1297 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1298 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1299 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1300 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1301 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1302 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1303 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1304 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1305 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1306 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1307 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1308 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1310 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1311 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1312 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1313 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1314 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1315 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1316 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1317 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1318 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1319 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1321 /* 3RF instruction _df(bit 21) = _w, _d */
1322 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1323 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1324 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1325 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1326 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1327 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1328 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1329 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1332 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1338 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1340 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1341 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1342 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1343 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1347 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1348 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1349 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1350 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1351 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1353 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1354 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1355 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1356 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1357 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1358 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1360 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1361 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1362 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1364 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1365 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1366 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1367 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1368 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1369 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1370 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1371 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1372 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1373 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1374 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1375 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1376 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1379 /* global register indices */
1380 static TCGv cpu_gpr
[32], cpu_PC
;
1381 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1382 static TCGv cpu_dspctrl
, btarget
, bcond
;
1383 static TCGv_i32 hflags
;
1384 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1385 static TCGv_i64 fpu_f64
[32];
1386 static TCGv_i64 msa_wr_d
[64];
1388 #include "exec/gen-icount.h"
1390 #define gen_helper_0e0i(name, arg) do { \
1391 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
1392 gen_helper_##name(cpu_env, helper_tmp); \
1393 tcg_temp_free_i32(helper_tmp); \
1396 #define gen_helper_0e1i(name, arg1, arg2) do { \
1397 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1398 gen_helper_##name(cpu_env, arg1, helper_tmp); \
1399 tcg_temp_free_i32(helper_tmp); \
1402 #define gen_helper_1e0i(name, ret, arg1) do { \
1403 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
1404 gen_helper_##name(ret, cpu_env, helper_tmp); \
1405 tcg_temp_free_i32(helper_tmp); \
1408 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
1409 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1410 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
1411 tcg_temp_free_i32(helper_tmp); \
1414 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
1415 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1416 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
1417 tcg_temp_free_i32(helper_tmp); \
1420 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
1421 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1422 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
1423 tcg_temp_free_i32(helper_tmp); \
1426 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
1427 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
1428 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
1429 tcg_temp_free_i32(helper_tmp); \
1432 typedef struct DisasContext
{
1433 DisasContextBase base
;
1434 target_ulong saved_pc
;
1435 target_ulong page_start
;
1438 int32_t CP0_Config1
;
1439 /* Routine used to access memory */
1441 TCGMemOp default_tcg_memop_mask
;
1442 uint32_t hflags
, saved_hflags
;
1443 target_ulong btarget
;
1454 int CP0_LLAddr_shift
;
1463 #define DISAS_STOP DISAS_TARGET_0
1464 #define DISAS_EXIT DISAS_TARGET_1
/* Register names used when registering the TCG globals and for disassembly. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Each MSA register is mapped as two i64 halves: wN.d0 (low), wN.d1 (high). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Translation-time disassembly logging; compiled out unless
 * MIPS_DEBUG_DISAS is set. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid/unimplemented opcode together with its decode fields. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
1525 /* General purpose registers moves. */
1526 static inline void gen_load_gpr (TCGv t
, int reg
)
1529 tcg_gen_movi_tl(t
, 0);
1531 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1534 static inline void gen_store_gpr (TCGv t
, int reg
)
1537 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1540 /* Moves to/from shadow registers. */
1541 static inline void gen_load_srsgpr (int from
, int to
)
1543 TCGv t0
= tcg_temp_new();
1546 tcg_gen_movi_tl(t0
, 0);
1548 TCGv_i32 t2
= tcg_temp_new_i32();
1549 TCGv_ptr addr
= tcg_temp_new_ptr();
1551 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1552 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1553 tcg_gen_andi_i32(t2
, t2
, 0xf);
1554 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1555 tcg_gen_ext_i32_ptr(addr
, t2
);
1556 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1558 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1559 tcg_temp_free_ptr(addr
);
1560 tcg_temp_free_i32(t2
);
1562 gen_store_gpr(t0
, to
);
1566 static inline void gen_store_srsgpr (int from
, int to
)
1569 TCGv t0
= tcg_temp_new();
1570 TCGv_i32 t2
= tcg_temp_new_i32();
1571 TCGv_ptr addr
= tcg_temp_new_ptr();
1573 gen_load_gpr(t0
, from
);
1574 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1575 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1576 tcg_gen_andi_i32(t2
, t2
, 0xf);
1577 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1578 tcg_gen_ext_i32_ptr(addr
, t2
);
1579 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1581 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1582 tcg_temp_free_ptr(addr
);
1583 tcg_temp_free_i32(t2
);
1589 static inline void gen_save_pc(target_ulong pc
)
1591 tcg_gen_movi_tl(cpu_PC
, pc
);
1594 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1596 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1597 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1598 gen_save_pc(ctx
->base
.pc_next
);
1599 ctx
->saved_pc
= ctx
->base
.pc_next
;
1601 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1602 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1603 ctx
->saved_hflags
= ctx
->hflags
;
1604 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1610 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1616 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1618 ctx
->saved_hflags
= ctx
->hflags
;
1619 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1625 ctx
->btarget
= env
->btarget
;
1630 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1632 TCGv_i32 texcp
= tcg_const_i32(excp
);
1633 TCGv_i32 terr
= tcg_const_i32(err
);
1634 save_cpu_state(ctx
, 1);
1635 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1636 tcg_temp_free_i32(terr
);
1637 tcg_temp_free_i32(texcp
);
1638 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1641 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1643 gen_helper_0e0i(raise_exception
, excp
);
1646 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1648 generate_exception_err(ctx
, excp
, 0);
1651 /* Floating point register moves. */
1652 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1654 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1655 generate_exception(ctx
, EXCP_RI
);
1657 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1660 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1663 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1664 generate_exception(ctx
, EXCP_RI
);
1666 t64
= tcg_temp_new_i64();
1667 tcg_gen_extu_i32_i64(t64
, t
);
1668 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1669 tcg_temp_free_i64(t64
);
1672 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1674 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1675 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1677 gen_load_fpr32(ctx
, t
, reg
| 1);
1681 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1683 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1684 TCGv_i64 t64
= tcg_temp_new_i64();
1685 tcg_gen_extu_i32_i64(t64
, t
);
1686 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1687 tcg_temp_free_i64(t64
);
1689 gen_store_fpr32(ctx
, t
, reg
| 1);
1693 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1695 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1696 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1698 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1702 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1704 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1705 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1708 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1709 t0
= tcg_temp_new_i64();
1710 tcg_gen_shri_i64(t0
, t
, 32);
1711 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1712 tcg_temp_free_i64(t0
);
/* Return the FCSR bit position of FP condition code 'cc':
   cc 0 is bit 23, cc 1..7 are bits 25..31. */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1724 /* Addresses computation */
1725 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1727 tcg_gen_add_tl(ret
, arg0
, arg1
);
1729 #if defined(TARGET_MIPS64)
1730 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1731 tcg_gen_ext32s_i64(ret
, ret
);
1736 /* Addresses computation (translation time) */
1737 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1740 target_long sum
= base
+ offset
;
1742 #if defined(TARGET_MIPS64)
1743 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1750 /* Sign-extract the low 32-bits to a target_long. */
1751 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1753 #if defined(TARGET_MIPS64)
1754 tcg_gen_ext32s_i64(ret
, arg
);
1756 tcg_gen_extrl_i64_i32(ret
, arg
);
1760 /* Sign-extract the high 32-bits to a target_long. */
1761 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1763 #if defined(TARGET_MIPS64)
1764 tcg_gen_sari_i64(ret
, arg
, 32);
1766 tcg_gen_extrh_i64_i32(ret
, arg
);
1770 static inline void check_cp0_enabled(DisasContext
*ctx
)
1772 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1773 generate_exception_err(ctx
, EXCP_CpU
, 0);
1776 static inline void check_cp1_enabled(DisasContext
*ctx
)
1778 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1779 generate_exception_err(ctx
, EXCP_CpU
, 1);
1782 /* Verify that the processor is running with COP1X instructions enabled.
1783 This is associated with the nabla symbol in the MIPS32 and MIPS64
1786 static inline void check_cop1x(DisasContext
*ctx
)
1788 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1789 generate_exception_end(ctx
, EXCP_RI
);
1792 /* Verify that the processor is running with 64-bit floating-point
1793 operations enabled. */
1795 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1797 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1798 generate_exception_end(ctx
, EXCP_RI
);
1802 * Verify if floating point register is valid; an operation is not defined
1803 * if bit 0 of any register specification is set and the FR bit in the
1804 * Status register equals zero, since the register numbers specify an
1805 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1806 * in the Status register equals one, both even and odd register numbers
1807 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1809 * Multiple 64 bit wide registers can be checked by calling
1810 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1812 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1814 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1815 generate_exception_end(ctx
, EXCP_RI
);
1818 /* Verify that the processor is running with DSP instructions enabled.
1819 This is enabled by CP0 Status register MX(24) bit.
1822 static inline void check_dsp(DisasContext
*ctx
)
1824 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1825 if (ctx
->insn_flags
& ASE_DSP
) {
1826 generate_exception_end(ctx
, EXCP_DSPDIS
);
1828 generate_exception_end(ctx
, EXCP_RI
);
1833 static inline void check_dspr2(DisasContext
*ctx
)
1835 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1836 if (ctx
->insn_flags
& ASE_DSP
) {
1837 generate_exception_end(ctx
, EXCP_DSPDIS
);
1839 generate_exception_end(ctx
, EXCP_RI
);
1844 /* This code generates a "reserved instruction" exception if the
1845 CPU does not support the instruction set corresponding to flags. */
1846 static inline void check_insn(DisasContext
*ctx
, int flags
)
1848 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1849 generate_exception_end(ctx
, EXCP_RI
);
1853 /* This code generates a "reserved instruction" exception if the
1854 CPU has corresponding flag set which indicates that the instruction
1855 has been removed. */
1856 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1858 if (unlikely(ctx
->insn_flags
& flags
)) {
1859 generate_exception_end(ctx
, EXCP_RI
);
1863 /* This code generates a "reserved instruction" exception if the
1864 CPU does not support 64-bit paired-single (PS) floating point data type */
1865 static inline void check_ps(DisasContext
*ctx
)
1867 if (unlikely(!ctx
->ps
)) {
1868 generate_exception(ctx
, EXCP_RI
);
1870 check_cp1_64bitmode(ctx
);
1873 #ifdef TARGET_MIPS64
1874 /* This code generates a "reserved instruction" exception if 64-bit
1875 instructions are not enabled. */
1876 static inline void check_mips_64(DisasContext
*ctx
)
1878 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1879 generate_exception_end(ctx
, EXCP_RI
);
1883 #ifndef CONFIG_USER_ONLY
1884 static inline void check_mvh(DisasContext
*ctx
)
1886 if (unlikely(!ctx
->mvh
)) {
1887 generate_exception(ctx
, EXCP_RI
);
1892 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1893 calling interface for 32 and 64-bit FPRs. No sense in changing
1894 all callers for gen_load_fpr32 when we need the CTX parameter for
1896 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1897 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1898 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1899 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1900 int ft, int fs, int cc) \
1902 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1903 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1912 check_cp1_registers(ctx, fs | ft); \
1920 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1921 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1923 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1924 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1925 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1926 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1927 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1928 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1929 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1930 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1931 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1932 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1933 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1934 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1935 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1936 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1937 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1938 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1941 tcg_temp_free_i##bits (fp0); \
1942 tcg_temp_free_i##bits (fp1); \
1945 FOP_CONDS(, 0, d
, FMT_D
, 64)
1946 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1947 FOP_CONDS(, 0, s
, FMT_S
, 32)
1948 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1949 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1950 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1953 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1954 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1955 int ft, int fs, int fd) \
1957 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1958 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1959 if (ifmt == FMT_D) { \
1960 check_cp1_registers(ctx, fs | ft | fd); \
1962 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1963 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1966 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1969 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1972 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1975 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1978 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1981 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1984 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1987 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1990 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1993 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1996 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1999 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2002 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2005 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2008 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2011 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2014 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2017 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2020 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2023 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2026 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2029 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2035 tcg_temp_free_i ## bits (fp0); \
2036 tcg_temp_free_i ## bits (fp1); \
2039 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2040 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2042 #undef gen_ldcmp_fpr32
2043 #undef gen_ldcmp_fpr64
2045 /* load/store instructions. */
2046 #ifdef CONFIG_USER_ONLY
2047 #define OP_LD_ATOMIC(insn,fname) \
2048 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2049 DisasContext *ctx) \
2051 TCGv t0 = tcg_temp_new(); \
2052 tcg_gen_mov_tl(t0, arg1); \
2053 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2054 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2055 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2056 tcg_temp_free(t0); \
2059 #define OP_LD_ATOMIC(insn,fname) \
2060 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2061 DisasContext *ctx) \
2063 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2066 OP_LD_ATOMIC(ll
,ld32s
);
2067 #if defined(TARGET_MIPS64)
2068 OP_LD_ATOMIC(lld
,ld64
);
2072 #ifdef CONFIG_USER_ONLY
2073 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2074 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2075 DisasContext *ctx) \
2077 TCGv t0 = tcg_temp_new(); \
2078 TCGLabel *l1 = gen_new_label(); \
2079 TCGLabel *l2 = gen_new_label(); \
2081 tcg_gen_andi_tl(t0, arg2, almask); \
2082 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2083 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2084 generate_exception(ctx, EXCP_AdES); \
2085 gen_set_label(l1); \
2086 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2087 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2088 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2089 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2090 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2091 generate_exception_end(ctx, EXCP_SC); \
2092 gen_set_label(l2); \
2093 tcg_gen_movi_tl(t0, 0); \
2094 gen_store_gpr(t0, rt); \
2095 tcg_temp_free(t0); \
2098 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2099 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2100 DisasContext *ctx) \
2102 TCGv t0 = tcg_temp_new(); \
2103 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2104 gen_store_gpr(t0, rt); \
2105 tcg_temp_free(t0); \
2108 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2109 #if defined(TARGET_MIPS64)
2110 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2114 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2115 int base
, int16_t offset
)
2118 tcg_gen_movi_tl(addr
, offset
);
2119 } else if (offset
== 0) {
2120 gen_load_gpr(addr
, base
);
2122 tcg_gen_movi_tl(addr
, offset
);
2123 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2127 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2129 target_ulong pc
= ctx
->base
.pc_next
;
2131 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2132 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2137 pc
&= ~(target_ulong
)3;
2142 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2143 int rt
, int base
, int16_t offset
)
2146 int mem_idx
= ctx
->mem_idx
;
2148 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2149 /* Loongson CPU uses a load to zero register for prefetch.
2150 We emulate it as a NOP. On other CPU we must perform the
2151 actual memory access. */
2155 t0
= tcg_temp_new();
2156 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2159 #if defined(TARGET_MIPS64)
2161 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2162 ctx
->default_tcg_memop_mask
);
2163 gen_store_gpr(t0
, rt
);
2166 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2167 ctx
->default_tcg_memop_mask
);
2168 gen_store_gpr(t0
, rt
);
2172 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2173 gen_store_gpr(t0
, rt
);
2176 t1
= tcg_temp_new();
2177 /* Do a byte access to possibly trigger a page
2178 fault with the unaligned address. */
2179 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2180 tcg_gen_andi_tl(t1
, t0
, 7);
2181 #ifndef TARGET_WORDS_BIGENDIAN
2182 tcg_gen_xori_tl(t1
, t1
, 7);
2184 tcg_gen_shli_tl(t1
, t1
, 3);
2185 tcg_gen_andi_tl(t0
, t0
, ~7);
2186 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2187 tcg_gen_shl_tl(t0
, t0
, t1
);
2188 t2
= tcg_const_tl(-1);
2189 tcg_gen_shl_tl(t2
, t2
, t1
);
2190 gen_load_gpr(t1
, rt
);
2191 tcg_gen_andc_tl(t1
, t1
, t2
);
2193 tcg_gen_or_tl(t0
, t0
, t1
);
2195 gen_store_gpr(t0
, rt
);
2198 t1
= tcg_temp_new();
2199 /* Do a byte access to possibly trigger a page
2200 fault with the unaligned address. */
2201 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2202 tcg_gen_andi_tl(t1
, t0
, 7);
2203 #ifdef TARGET_WORDS_BIGENDIAN
2204 tcg_gen_xori_tl(t1
, t1
, 7);
2206 tcg_gen_shli_tl(t1
, t1
, 3);
2207 tcg_gen_andi_tl(t0
, t0
, ~7);
2208 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2209 tcg_gen_shr_tl(t0
, t0
, t1
);
2210 tcg_gen_xori_tl(t1
, t1
, 63);
2211 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2212 tcg_gen_shl_tl(t2
, t2
, t1
);
2213 gen_load_gpr(t1
, rt
);
2214 tcg_gen_and_tl(t1
, t1
, t2
);
2216 tcg_gen_or_tl(t0
, t0
, t1
);
2218 gen_store_gpr(t0
, rt
);
2221 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2222 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2225 gen_store_gpr(t0
, rt
);
2229 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2230 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2232 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2233 gen_store_gpr(t0
, rt
);
2236 mem_idx
= MIPS_HFLAG_UM
;
2239 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2240 ctx
->default_tcg_memop_mask
);
2241 gen_store_gpr(t0
, rt
);
2244 mem_idx
= MIPS_HFLAG_UM
;
2247 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2248 ctx
->default_tcg_memop_mask
);
2249 gen_store_gpr(t0
, rt
);
2252 mem_idx
= MIPS_HFLAG_UM
;
2255 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2256 ctx
->default_tcg_memop_mask
);
2257 gen_store_gpr(t0
, rt
);
2260 mem_idx
= MIPS_HFLAG_UM
;
2263 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2264 gen_store_gpr(t0
, rt
);
2267 mem_idx
= MIPS_HFLAG_UM
;
2270 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2271 gen_store_gpr(t0
, rt
);
2274 mem_idx
= MIPS_HFLAG_UM
;
2277 t1
= tcg_temp_new();
2278 /* Do a byte access to possibly trigger a page
2279 fault with the unaligned address. */
2280 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2281 tcg_gen_andi_tl(t1
, t0
, 3);
2282 #ifndef TARGET_WORDS_BIGENDIAN
2283 tcg_gen_xori_tl(t1
, t1
, 3);
2285 tcg_gen_shli_tl(t1
, t1
, 3);
2286 tcg_gen_andi_tl(t0
, t0
, ~3);
2287 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2288 tcg_gen_shl_tl(t0
, t0
, t1
);
2289 t2
= tcg_const_tl(-1);
2290 tcg_gen_shl_tl(t2
, t2
, t1
);
2291 gen_load_gpr(t1
, rt
);
2292 tcg_gen_andc_tl(t1
, t1
, t2
);
2294 tcg_gen_or_tl(t0
, t0
, t1
);
2296 tcg_gen_ext32s_tl(t0
, t0
);
2297 gen_store_gpr(t0
, rt
);
2300 mem_idx
= MIPS_HFLAG_UM
;
2303 t1
= tcg_temp_new();
2304 /* Do a byte access to possibly trigger a page
2305 fault with the unaligned address. */
2306 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2307 tcg_gen_andi_tl(t1
, t0
, 3);
2308 #ifdef TARGET_WORDS_BIGENDIAN
2309 tcg_gen_xori_tl(t1
, t1
, 3);
2311 tcg_gen_shli_tl(t1
, t1
, 3);
2312 tcg_gen_andi_tl(t0
, t0
, ~3);
2313 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2314 tcg_gen_shr_tl(t0
, t0
, t1
);
2315 tcg_gen_xori_tl(t1
, t1
, 31);
2316 t2
= tcg_const_tl(0xfffffffeull
);
2317 tcg_gen_shl_tl(t2
, t2
, t1
);
2318 gen_load_gpr(t1
, rt
);
2319 tcg_gen_and_tl(t1
, t1
, t2
);
2321 tcg_gen_or_tl(t0
, t0
, t1
);
2323 tcg_gen_ext32s_tl(t0
, t0
);
2324 gen_store_gpr(t0
, rt
);
2327 mem_idx
= MIPS_HFLAG_UM
;
2331 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2332 gen_store_gpr(t0
, rt
);
2339 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2340 int base
, int16_t offset
)
2342 TCGv t0
= tcg_temp_new();
2343 TCGv t1
= tcg_temp_new();
2344 int mem_idx
= ctx
->mem_idx
;
2346 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2347 gen_load_gpr(t1
, rt
);
2349 #if defined(TARGET_MIPS64)
2351 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2352 ctx
->default_tcg_memop_mask
);
2355 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2358 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2362 mem_idx
= MIPS_HFLAG_UM
;
2365 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2366 ctx
->default_tcg_memop_mask
);
2369 mem_idx
= MIPS_HFLAG_UM
;
2372 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2373 ctx
->default_tcg_memop_mask
);
2376 mem_idx
= MIPS_HFLAG_UM
;
2379 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2382 mem_idx
= MIPS_HFLAG_UM
;
2385 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2388 mem_idx
= MIPS_HFLAG_UM
;
2391 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2399 /* Store conditional */
2400 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2401 int base
, int16_t offset
)
2404 int mem_idx
= ctx
->mem_idx
;
2406 #ifdef CONFIG_USER_ONLY
2407 t0
= tcg_temp_local_new();
2408 t1
= tcg_temp_local_new();
2410 t0
= tcg_temp_new();
2411 t1
= tcg_temp_new();
2413 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2414 gen_load_gpr(t1
, rt
);
2416 #if defined(TARGET_MIPS64)
2419 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2423 mem_idx
= MIPS_HFLAG_UM
;
2427 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2434 /* Load and store */
2435 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2436 int base
, int16_t offset
)
2438 TCGv t0
= tcg_temp_new();
2440 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2441 /* Don't do NOP if destination is zero: we must perform the actual
2446 TCGv_i32 fp0
= tcg_temp_new_i32();
2447 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2448 ctx
->default_tcg_memop_mask
);
2449 gen_store_fpr32(ctx
, fp0
, ft
);
2450 tcg_temp_free_i32(fp0
);
2455 TCGv_i32 fp0
= tcg_temp_new_i32();
2456 gen_load_fpr32(ctx
, fp0
, ft
);
2457 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2458 ctx
->default_tcg_memop_mask
);
2459 tcg_temp_free_i32(fp0
);
2464 TCGv_i64 fp0
= tcg_temp_new_i64();
2465 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2466 ctx
->default_tcg_memop_mask
);
2467 gen_store_fpr64(ctx
, fp0
, ft
);
2468 tcg_temp_free_i64(fp0
);
2473 TCGv_i64 fp0
= tcg_temp_new_i64();
2474 gen_load_fpr64(ctx
, fp0
, ft
);
2475 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2476 ctx
->default_tcg_memop_mask
);
2477 tcg_temp_free_i64(fp0
);
2481 MIPS_INVAL("flt_ldst");
2482 generate_exception_end(ctx
, EXCP_RI
);
2489 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2490 int rs
, int16_t imm
)
2492 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2493 check_cp1_enabled(ctx
);
2497 check_insn(ctx
, ISA_MIPS2
);
2500 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2503 generate_exception_err(ctx
, EXCP_CpU
, 1);
2507 /* Arithmetic with immediate operand */
2508 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2509 int rt
, int rs
, int16_t imm
)
2511 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2513 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2514 /* If no destination, treat it as a NOP.
2515 For addi, we must generate the overflow exception when needed. */
2521 TCGv t0
= tcg_temp_local_new();
2522 TCGv t1
= tcg_temp_new();
2523 TCGv t2
= tcg_temp_new();
2524 TCGLabel
*l1
= gen_new_label();
2526 gen_load_gpr(t1
, rs
);
2527 tcg_gen_addi_tl(t0
, t1
, uimm
);
2528 tcg_gen_ext32s_tl(t0
, t0
);
2530 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2531 tcg_gen_xori_tl(t2
, t0
, uimm
);
2532 tcg_gen_and_tl(t1
, t1
, t2
);
2534 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2536 /* operands of same sign, result different sign */
2537 generate_exception(ctx
, EXCP_OVERFLOW
);
2539 tcg_gen_ext32s_tl(t0
, t0
);
2540 gen_store_gpr(t0
, rt
);
2546 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2547 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2549 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2552 #if defined(TARGET_MIPS64)
2555 TCGv t0
= tcg_temp_local_new();
2556 TCGv t1
= tcg_temp_new();
2557 TCGv t2
= tcg_temp_new();
2558 TCGLabel
*l1
= gen_new_label();
2560 gen_load_gpr(t1
, rs
);
2561 tcg_gen_addi_tl(t0
, t1
, uimm
);
2563 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2564 tcg_gen_xori_tl(t2
, t0
, uimm
);
2565 tcg_gen_and_tl(t1
, t1
, t2
);
2567 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2569 /* operands of same sign, result different sign */
2570 generate_exception(ctx
, EXCP_OVERFLOW
);
2572 gen_store_gpr(t0
, rt
);
2578 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2580 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2587 /* Logic with immediate operand */
2588 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2589 int rt
, int rs
, int16_t imm
)
2594 /* If no destination, treat it as a NOP. */
2597 uimm
= (uint16_t)imm
;
2600 if (likely(rs
!= 0))
2601 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2603 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2607 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2609 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2612 if (likely(rs
!= 0))
2613 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2615 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2618 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2620 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2621 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2623 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2632 /* Set on less than with immediate operand */
2633 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2634 int rt
, int rs
, int16_t imm
)
2636 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2640 /* If no destination, treat it as a NOP. */
2643 t0
= tcg_temp_new();
2644 gen_load_gpr(t0
, rs
);
2647 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2650 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2656 /* Shifts with immediate operand */
2657 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2658 int rt
, int rs
, int16_t imm
)
2660 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2664 /* If no destination, treat it as a NOP. */
2668 t0
= tcg_temp_new();
2669 gen_load_gpr(t0
, rs
);
2672 tcg_gen_shli_tl(t0
, t0
, uimm
);
2673 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2676 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2680 tcg_gen_ext32u_tl(t0
, t0
);
2681 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2683 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2688 TCGv_i32 t1
= tcg_temp_new_i32();
2690 tcg_gen_trunc_tl_i32(t1
, t0
);
2691 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2692 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2693 tcg_temp_free_i32(t1
);
2695 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2698 #if defined(TARGET_MIPS64)
2700 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2703 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2706 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2710 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2712 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2716 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2719 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2722 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2725 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2733 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2734 int rd
, int rs
, int rt
)
2736 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2737 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2738 /* If no destination, treat it as a NOP.
2739 For add & sub, we must generate the overflow exception when needed. */
2746 TCGv t0
= tcg_temp_local_new();
2747 TCGv t1
= tcg_temp_new();
2748 TCGv t2
= tcg_temp_new();
2749 TCGLabel
*l1
= gen_new_label();
2751 gen_load_gpr(t1
, rs
);
2752 gen_load_gpr(t2
, rt
);
2753 tcg_gen_add_tl(t0
, t1
, t2
);
2754 tcg_gen_ext32s_tl(t0
, t0
);
2755 tcg_gen_xor_tl(t1
, t1
, t2
);
2756 tcg_gen_xor_tl(t2
, t0
, t2
);
2757 tcg_gen_andc_tl(t1
, t2
, t1
);
2759 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2761 /* operands of same sign, result different sign */
2762 generate_exception(ctx
, EXCP_OVERFLOW
);
2764 gen_store_gpr(t0
, rd
);
2769 if (rs
!= 0 && rt
!= 0) {
2770 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2771 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2772 } else if (rs
== 0 && rt
!= 0) {
2773 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2774 } else if (rs
!= 0 && rt
== 0) {
2775 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2777 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2782 TCGv t0
= tcg_temp_local_new();
2783 TCGv t1
= tcg_temp_new();
2784 TCGv t2
= tcg_temp_new();
2785 TCGLabel
*l1
= gen_new_label();
2787 gen_load_gpr(t1
, rs
);
2788 gen_load_gpr(t2
, rt
);
2789 tcg_gen_sub_tl(t0
, t1
, t2
);
2790 tcg_gen_ext32s_tl(t0
, t0
);
2791 tcg_gen_xor_tl(t2
, t1
, t2
);
2792 tcg_gen_xor_tl(t1
, t0
, t1
);
2793 tcg_gen_and_tl(t1
, t1
, t2
);
2795 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2797 /* operands of different sign, first operand and result different sign */
2798 generate_exception(ctx
, EXCP_OVERFLOW
);
2800 gen_store_gpr(t0
, rd
);
2805 if (rs
!= 0 && rt
!= 0) {
2806 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2807 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2808 } else if (rs
== 0 && rt
!= 0) {
2809 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2810 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2811 } else if (rs
!= 0 && rt
== 0) {
2812 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2814 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2817 #if defined(TARGET_MIPS64)
2820 TCGv t0
= tcg_temp_local_new();
2821 TCGv t1
= tcg_temp_new();
2822 TCGv t2
= tcg_temp_new();
2823 TCGLabel
*l1
= gen_new_label();
2825 gen_load_gpr(t1
, rs
);
2826 gen_load_gpr(t2
, rt
);
2827 tcg_gen_add_tl(t0
, t1
, t2
);
2828 tcg_gen_xor_tl(t1
, t1
, t2
);
2829 tcg_gen_xor_tl(t2
, t0
, t2
);
2830 tcg_gen_andc_tl(t1
, t2
, t1
);
2832 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2834 /* operands of same sign, result different sign */
2835 generate_exception(ctx
, EXCP_OVERFLOW
);
2837 gen_store_gpr(t0
, rd
);
2842 if (rs
!= 0 && rt
!= 0) {
2843 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2844 } else if (rs
== 0 && rt
!= 0) {
2845 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2846 } else if (rs
!= 0 && rt
== 0) {
2847 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2849 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2854 TCGv t0
= tcg_temp_local_new();
2855 TCGv t1
= tcg_temp_new();
2856 TCGv t2
= tcg_temp_new();
2857 TCGLabel
*l1
= gen_new_label();
2859 gen_load_gpr(t1
, rs
);
2860 gen_load_gpr(t2
, rt
);
2861 tcg_gen_sub_tl(t0
, t1
, t2
);
2862 tcg_gen_xor_tl(t2
, t1
, t2
);
2863 tcg_gen_xor_tl(t1
, t0
, t1
);
2864 tcg_gen_and_tl(t1
, t1
, t2
);
2866 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2868 /* operands of different sign, first operand and result different sign */
2869 generate_exception(ctx
, EXCP_OVERFLOW
);
2871 gen_store_gpr(t0
, rd
);
2876 if (rs
!= 0 && rt
!= 0) {
2877 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2878 } else if (rs
== 0 && rt
!= 0) {
2879 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2880 } else if (rs
!= 0 && rt
== 0) {
2881 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2883 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2888 if (likely(rs
!= 0 && rt
!= 0)) {
2889 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2890 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2892 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2898 /* Conditional move */
2899 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2900 int rd
, int rs
, int rt
)
2905 /* If no destination, treat it as a NOP. */
2909 t0
= tcg_temp_new();
2910 gen_load_gpr(t0
, rt
);
2911 t1
= tcg_const_tl(0);
2912 t2
= tcg_temp_new();
2913 gen_load_gpr(t2
, rs
);
2916 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2919 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2922 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2925 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2934 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2935 int rd
, int rs
, int rt
)
2938 /* If no destination, treat it as a NOP. */
2944 if (likely(rs
!= 0 && rt
!= 0)) {
2945 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2947 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2951 if (rs
!= 0 && rt
!= 0) {
2952 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2953 } else if (rs
== 0 && rt
!= 0) {
2954 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2955 } else if (rs
!= 0 && rt
== 0) {
2956 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2958 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2962 if (likely(rs
!= 0 && rt
!= 0)) {
2963 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2964 } else if (rs
== 0 && rt
!= 0) {
2965 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2966 } else if (rs
!= 0 && rt
== 0) {
2967 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2969 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2973 if (likely(rs
!= 0 && rt
!= 0)) {
2974 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2975 } else if (rs
== 0 && rt
!= 0) {
2976 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2977 } else if (rs
!= 0 && rt
== 0) {
2978 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2980 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2986 /* Set on lower than */
2987 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2988 int rd
, int rs
, int rt
)
2993 /* If no destination, treat it as a NOP. */
2997 t0
= tcg_temp_new();
2998 t1
= tcg_temp_new();
2999 gen_load_gpr(t0
, rs
);
3000 gen_load_gpr(t1
, rt
);
3003 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3006 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3014 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3015 int rd
, int rs
, int rt
)
3020 /* If no destination, treat it as a NOP.
3021 For add & sub, we must generate the overflow exception when needed. */
3025 t0
= tcg_temp_new();
3026 t1
= tcg_temp_new();
3027 gen_load_gpr(t0
, rs
);
3028 gen_load_gpr(t1
, rt
);
3031 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3032 tcg_gen_shl_tl(t0
, t1
, t0
);
3033 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3036 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3037 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3040 tcg_gen_ext32u_tl(t1
, t1
);
3041 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3042 tcg_gen_shr_tl(t0
, t1
, t0
);
3043 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3047 TCGv_i32 t2
= tcg_temp_new_i32();
3048 TCGv_i32 t3
= tcg_temp_new_i32();
3050 tcg_gen_trunc_tl_i32(t2
, t0
);
3051 tcg_gen_trunc_tl_i32(t3
, t1
);
3052 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3053 tcg_gen_rotr_i32(t2
, t3
, t2
);
3054 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3055 tcg_temp_free_i32(t2
);
3056 tcg_temp_free_i32(t3
);
3059 #if defined(TARGET_MIPS64)
3061 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3062 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3065 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3066 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3069 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3070 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3073 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3074 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3082 /* Arithmetic on HI/LO registers */
3083 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3085 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3096 #if defined(TARGET_MIPS64)
3098 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3102 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3106 #if defined(TARGET_MIPS64)
3108 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3112 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3117 #if defined(TARGET_MIPS64)
3119 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3123 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3126 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3131 #if defined(TARGET_MIPS64)
3133 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3137 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3140 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3146 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3149 TCGv t0
= tcg_const_tl(addr
);
3150 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3151 gen_store_gpr(t0
, reg
);
3155 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3161 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3164 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3165 addr
= addr_add(ctx
, pc
, offset
);
3166 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3170 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3171 addr
= addr_add(ctx
, pc
, offset
);
3172 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3174 #if defined(TARGET_MIPS64)
3177 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3178 addr
= addr_add(ctx
, pc
, offset
);
3179 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3183 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3186 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3187 addr
= addr_add(ctx
, pc
, offset
);
3188 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3193 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3194 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3195 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3198 #if defined(TARGET_MIPS64)
3199 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3200 case R6_OPC_LDPC
+ (1 << 16):
3201 case R6_OPC_LDPC
+ (2 << 16):
3202 case R6_OPC_LDPC
+ (3 << 16):
3204 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3205 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3206 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3210 MIPS_INVAL("OPC_PCREL");
3211 generate_exception_end(ctx
, EXCP_RI
);
3218 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3227 t0
= tcg_temp_new();
3228 t1
= tcg_temp_new();
3230 gen_load_gpr(t0
, rs
);
3231 gen_load_gpr(t1
, rt
);
3236 TCGv t2
= tcg_temp_new();
3237 TCGv t3
= tcg_temp_new();
3238 tcg_gen_ext32s_tl(t0
, t0
);
3239 tcg_gen_ext32s_tl(t1
, t1
);
3240 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3241 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3242 tcg_gen_and_tl(t2
, t2
, t3
);
3243 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3244 tcg_gen_or_tl(t2
, t2
, t3
);
3245 tcg_gen_movi_tl(t3
, 0);
3246 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3247 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3248 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3255 TCGv t2
= tcg_temp_new();
3256 TCGv t3
= tcg_temp_new();
3257 tcg_gen_ext32s_tl(t0
, t0
);
3258 tcg_gen_ext32s_tl(t1
, t1
);
3259 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3260 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3261 tcg_gen_and_tl(t2
, t2
, t3
);
3262 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3263 tcg_gen_or_tl(t2
, t2
, t3
);
3264 tcg_gen_movi_tl(t3
, 0);
3265 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3266 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3267 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3274 TCGv t2
= tcg_const_tl(0);
3275 TCGv t3
= tcg_const_tl(1);
3276 tcg_gen_ext32u_tl(t0
, t0
);
3277 tcg_gen_ext32u_tl(t1
, t1
);
3278 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3279 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3280 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3287 TCGv t2
= tcg_const_tl(0);
3288 TCGv t3
= tcg_const_tl(1);
3289 tcg_gen_ext32u_tl(t0
, t0
);
3290 tcg_gen_ext32u_tl(t1
, t1
);
3291 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3292 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3293 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3300 TCGv_i32 t2
= tcg_temp_new_i32();
3301 TCGv_i32 t3
= tcg_temp_new_i32();
3302 tcg_gen_trunc_tl_i32(t2
, t0
);
3303 tcg_gen_trunc_tl_i32(t3
, t1
);
3304 tcg_gen_mul_i32(t2
, t2
, t3
);
3305 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3306 tcg_temp_free_i32(t2
);
3307 tcg_temp_free_i32(t3
);
3312 TCGv_i32 t2
= tcg_temp_new_i32();
3313 TCGv_i32 t3
= tcg_temp_new_i32();
3314 tcg_gen_trunc_tl_i32(t2
, t0
);
3315 tcg_gen_trunc_tl_i32(t3
, t1
);
3316 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3317 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3318 tcg_temp_free_i32(t2
);
3319 tcg_temp_free_i32(t3
);
3324 TCGv_i32 t2
= tcg_temp_new_i32();
3325 TCGv_i32 t3
= tcg_temp_new_i32();
3326 tcg_gen_trunc_tl_i32(t2
, t0
);
3327 tcg_gen_trunc_tl_i32(t3
, t1
);
3328 tcg_gen_mul_i32(t2
, t2
, t3
);
3329 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3330 tcg_temp_free_i32(t2
);
3331 tcg_temp_free_i32(t3
);
3336 TCGv_i32 t2
= tcg_temp_new_i32();
3337 TCGv_i32 t3
= tcg_temp_new_i32();
3338 tcg_gen_trunc_tl_i32(t2
, t0
);
3339 tcg_gen_trunc_tl_i32(t3
, t1
);
3340 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3341 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3342 tcg_temp_free_i32(t2
);
3343 tcg_temp_free_i32(t3
);
3346 #if defined(TARGET_MIPS64)
3349 TCGv t2
= tcg_temp_new();
3350 TCGv t3
= tcg_temp_new();
3351 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3352 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3353 tcg_gen_and_tl(t2
, t2
, t3
);
3354 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3355 tcg_gen_or_tl(t2
, t2
, t3
);
3356 tcg_gen_movi_tl(t3
, 0);
3357 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3358 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3365 TCGv t2
= tcg_temp_new();
3366 TCGv t3
= tcg_temp_new();
3367 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3368 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3369 tcg_gen_and_tl(t2
, t2
, t3
);
3370 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3371 tcg_gen_or_tl(t2
, t2
, t3
);
3372 tcg_gen_movi_tl(t3
, 0);
3373 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3374 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3381 TCGv t2
= tcg_const_tl(0);
3382 TCGv t3
= tcg_const_tl(1);
3383 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3384 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3391 TCGv t2
= tcg_const_tl(0);
3392 TCGv t3
= tcg_const_tl(1);
3393 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3394 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3400 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3404 TCGv t2
= tcg_temp_new();
3405 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3410 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3414 TCGv t2
= tcg_temp_new();
3415 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3421 MIPS_INVAL("r6 mul/div");
3422 generate_exception_end(ctx
, EXCP_RI
);
3430 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3431 int acc
, int rs
, int rt
)
3435 t0
= tcg_temp_new();
3436 t1
= tcg_temp_new();
3438 gen_load_gpr(t0
, rs
);
3439 gen_load_gpr(t1
, rt
);
3448 TCGv t2
= tcg_temp_new();
3449 TCGv t3
= tcg_temp_new();
3450 tcg_gen_ext32s_tl(t0
, t0
);
3451 tcg_gen_ext32s_tl(t1
, t1
);
3452 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3453 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3454 tcg_gen_and_tl(t2
, t2
, t3
);
3455 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3456 tcg_gen_or_tl(t2
, t2
, t3
);
3457 tcg_gen_movi_tl(t3
, 0);
3458 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3459 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3460 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3461 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3462 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3469 TCGv t2
= tcg_const_tl(0);
3470 TCGv t3
= tcg_const_tl(1);
3471 tcg_gen_ext32u_tl(t0
, t0
);
3472 tcg_gen_ext32u_tl(t1
, t1
);
3473 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3474 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3475 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3476 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3477 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3484 TCGv_i32 t2
= tcg_temp_new_i32();
3485 TCGv_i32 t3
= tcg_temp_new_i32();
3486 tcg_gen_trunc_tl_i32(t2
, t0
);
3487 tcg_gen_trunc_tl_i32(t3
, t1
);
3488 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3489 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3490 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3491 tcg_temp_free_i32(t2
);
3492 tcg_temp_free_i32(t3
);
3497 TCGv_i32 t2
= tcg_temp_new_i32();
3498 TCGv_i32 t3
= tcg_temp_new_i32();
3499 tcg_gen_trunc_tl_i32(t2
, t0
);
3500 tcg_gen_trunc_tl_i32(t3
, t1
);
3501 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3502 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3503 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3504 tcg_temp_free_i32(t2
);
3505 tcg_temp_free_i32(t3
);
3508 #if defined(TARGET_MIPS64)
3511 TCGv t2
= tcg_temp_new();
3512 TCGv t3
= tcg_temp_new();
3513 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3514 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3515 tcg_gen_and_tl(t2
, t2
, t3
);
3516 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3517 tcg_gen_or_tl(t2
, t2
, t3
);
3518 tcg_gen_movi_tl(t3
, 0);
3519 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3520 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3521 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3528 TCGv t2
= tcg_const_tl(0);
3529 TCGv t3
= tcg_const_tl(1);
3530 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3531 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3532 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3538 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3541 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3546 TCGv_i64 t2
= tcg_temp_new_i64();
3547 TCGv_i64 t3
= tcg_temp_new_i64();
3549 tcg_gen_ext_tl_i64(t2
, t0
);
3550 tcg_gen_ext_tl_i64(t3
, t1
);
3551 tcg_gen_mul_i64(t2
, t2
, t3
);
3552 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3553 tcg_gen_add_i64(t2
, t2
, t3
);
3554 tcg_temp_free_i64(t3
);
3555 gen_move_low32(cpu_LO
[acc
], t2
);
3556 gen_move_high32(cpu_HI
[acc
], t2
);
3557 tcg_temp_free_i64(t2
);
3562 TCGv_i64 t2
= tcg_temp_new_i64();
3563 TCGv_i64 t3
= tcg_temp_new_i64();
3565 tcg_gen_ext32u_tl(t0
, t0
);
3566 tcg_gen_ext32u_tl(t1
, t1
);
3567 tcg_gen_extu_tl_i64(t2
, t0
);
3568 tcg_gen_extu_tl_i64(t3
, t1
);
3569 tcg_gen_mul_i64(t2
, t2
, t3
);
3570 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3571 tcg_gen_add_i64(t2
, t2
, t3
);
3572 tcg_temp_free_i64(t3
);
3573 gen_move_low32(cpu_LO
[acc
], t2
);
3574 gen_move_high32(cpu_HI
[acc
], t2
);
3575 tcg_temp_free_i64(t2
);
3580 TCGv_i64 t2
= tcg_temp_new_i64();
3581 TCGv_i64 t3
= tcg_temp_new_i64();
3583 tcg_gen_ext_tl_i64(t2
, t0
);
3584 tcg_gen_ext_tl_i64(t3
, t1
);
3585 tcg_gen_mul_i64(t2
, t2
, t3
);
3586 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3587 tcg_gen_sub_i64(t2
, t3
, t2
);
3588 tcg_temp_free_i64(t3
);
3589 gen_move_low32(cpu_LO
[acc
], t2
);
3590 gen_move_high32(cpu_HI
[acc
], t2
);
3591 tcg_temp_free_i64(t2
);
3596 TCGv_i64 t2
= tcg_temp_new_i64();
3597 TCGv_i64 t3
= tcg_temp_new_i64();
3599 tcg_gen_ext32u_tl(t0
, t0
);
3600 tcg_gen_ext32u_tl(t1
, t1
);
3601 tcg_gen_extu_tl_i64(t2
, t0
);
3602 tcg_gen_extu_tl_i64(t3
, t1
);
3603 tcg_gen_mul_i64(t2
, t2
, t3
);
3604 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3605 tcg_gen_sub_i64(t2
, t3
, t2
);
3606 tcg_temp_free_i64(t3
);
3607 gen_move_low32(cpu_LO
[acc
], t2
);
3608 gen_move_high32(cpu_HI
[acc
], t2
);
3609 tcg_temp_free_i64(t2
);
3613 MIPS_INVAL("mul/div");
3614 generate_exception_end(ctx
, EXCP_RI
);
3622 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3623 int rd
, int rs
, int rt
)
3625 TCGv t0
= tcg_temp_new();
3626 TCGv t1
= tcg_temp_new();
3628 gen_load_gpr(t0
, rs
);
3629 gen_load_gpr(t1
, rt
);
3632 case OPC_VR54XX_MULS
:
3633 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3635 case OPC_VR54XX_MULSU
:
3636 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3638 case OPC_VR54XX_MACC
:
3639 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3641 case OPC_VR54XX_MACCU
:
3642 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3644 case OPC_VR54XX_MSAC
:
3645 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3647 case OPC_VR54XX_MSACU
:
3648 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3650 case OPC_VR54XX_MULHI
:
3651 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3653 case OPC_VR54XX_MULHIU
:
3654 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3656 case OPC_VR54XX_MULSHI
:
3657 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3659 case OPC_VR54XX_MULSHIU
:
3660 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3662 case OPC_VR54XX_MACCHI
:
3663 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3665 case OPC_VR54XX_MACCHIU
:
3666 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3668 case OPC_VR54XX_MSACHI
:
3669 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3671 case OPC_VR54XX_MSACHIU
:
3672 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3675 MIPS_INVAL("mul vr54xx");
3676 generate_exception_end(ctx
, EXCP_RI
);
3679 gen_store_gpr(t0
, rd
);
3686 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3696 gen_load_gpr(t0
, rs
);
3701 #if defined(TARGET_MIPS64)
3705 tcg_gen_not_tl(t0
, t0
);
3714 tcg_gen_ext32u_tl(t0
, t0
);
3715 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3716 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3718 #if defined(TARGET_MIPS64)
3723 tcg_gen_clzi_i64(t0
, t0
, 64);
3729 /* Godson integer instructions */
3730 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3731 int rd
, int rs
, int rt
)
3743 case OPC_MULTU_G_2E
:
3744 case OPC_MULTU_G_2F
:
3745 #if defined(TARGET_MIPS64)
3746 case OPC_DMULT_G_2E
:
3747 case OPC_DMULT_G_2F
:
3748 case OPC_DMULTU_G_2E
:
3749 case OPC_DMULTU_G_2F
:
3751 t0
= tcg_temp_new();
3752 t1
= tcg_temp_new();
3755 t0
= tcg_temp_local_new();
3756 t1
= tcg_temp_local_new();
3760 gen_load_gpr(t0
, rs
);
3761 gen_load_gpr(t1
, rt
);
3766 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3767 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3769 case OPC_MULTU_G_2E
:
3770 case OPC_MULTU_G_2F
:
3771 tcg_gen_ext32u_tl(t0
, t0
);
3772 tcg_gen_ext32u_tl(t1
, t1
);
3773 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3774 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3779 TCGLabel
*l1
= gen_new_label();
3780 TCGLabel
*l2
= gen_new_label();
3781 TCGLabel
*l3
= gen_new_label();
3782 tcg_gen_ext32s_tl(t0
, t0
);
3783 tcg_gen_ext32s_tl(t1
, t1
);
3784 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3785 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3789 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3790 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3793 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3794 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3801 TCGLabel
*l1
= gen_new_label();
3802 TCGLabel
*l2
= gen_new_label();
3803 tcg_gen_ext32u_tl(t0
, t0
);
3804 tcg_gen_ext32u_tl(t1
, t1
);
3805 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3809 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3810 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3817 TCGLabel
*l1
= gen_new_label();
3818 TCGLabel
*l2
= gen_new_label();
3819 TCGLabel
*l3
= gen_new_label();
3820 tcg_gen_ext32u_tl(t0
, t0
);
3821 tcg_gen_ext32u_tl(t1
, t1
);
3822 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3823 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3824 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3826 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3829 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3830 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3837 TCGLabel
*l1
= gen_new_label();
3838 TCGLabel
*l2
= gen_new_label();
3839 tcg_gen_ext32u_tl(t0
, t0
);
3840 tcg_gen_ext32u_tl(t1
, t1
);
3841 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3842 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3845 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3846 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3850 #if defined(TARGET_MIPS64)
3851 case OPC_DMULT_G_2E
:
3852 case OPC_DMULT_G_2F
:
3853 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3855 case OPC_DMULTU_G_2E
:
3856 case OPC_DMULTU_G_2F
:
3857 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3862 TCGLabel
*l1
= gen_new_label();
3863 TCGLabel
*l2
= gen_new_label();
3864 TCGLabel
*l3
= gen_new_label();
3865 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3866 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3869 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3870 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3871 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3874 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3878 case OPC_DDIVU_G_2E
:
3879 case OPC_DDIVU_G_2F
:
3881 TCGLabel
*l1
= gen_new_label();
3882 TCGLabel
*l2
= gen_new_label();
3883 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3884 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3887 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3894 TCGLabel
*l1
= gen_new_label();
3895 TCGLabel
*l2
= gen_new_label();
3896 TCGLabel
*l3
= gen_new_label();
3897 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3898 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3899 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3901 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3904 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3908 case OPC_DMODU_G_2E
:
3909 case OPC_DMODU_G_2F
:
3911 TCGLabel
*l1
= gen_new_label();
3912 TCGLabel
*l2
= gen_new_label();
3913 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3914 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3917 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3928 /* Loongson multimedia instructions */
3929 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3931 uint32_t opc
, shift_max
;
3934 opc
= MASK_LMI(ctx
->opcode
);
3940 t0
= tcg_temp_local_new_i64();
3941 t1
= tcg_temp_local_new_i64();
3944 t0
= tcg_temp_new_i64();
3945 t1
= tcg_temp_new_i64();
3949 check_cp1_enabled(ctx
);
3950 gen_load_fpr64(ctx
, t0
, rs
);
3951 gen_load_fpr64(ctx
, t1
, rt
);
3953 #define LMI_HELPER(UP, LO) \
3954 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3955 #define LMI_HELPER_1(UP, LO) \
3956 case OPC_##UP: gen_helper_##LO(t0, t0); break
3957 #define LMI_DIRECT(UP, LO, OP) \
3958 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3961 LMI_HELPER(PADDSH
, paddsh
);
3962 LMI_HELPER(PADDUSH
, paddush
);
3963 LMI_HELPER(PADDH
, paddh
);
3964 LMI_HELPER(PADDW
, paddw
);
3965 LMI_HELPER(PADDSB
, paddsb
);
3966 LMI_HELPER(PADDUSB
, paddusb
);
3967 LMI_HELPER(PADDB
, paddb
);
3969 LMI_HELPER(PSUBSH
, psubsh
);
3970 LMI_HELPER(PSUBUSH
, psubush
);
3971 LMI_HELPER(PSUBH
, psubh
);
3972 LMI_HELPER(PSUBW
, psubw
);
3973 LMI_HELPER(PSUBSB
, psubsb
);
3974 LMI_HELPER(PSUBUSB
, psubusb
);
3975 LMI_HELPER(PSUBB
, psubb
);
3977 LMI_HELPER(PSHUFH
, pshufh
);
3978 LMI_HELPER(PACKSSWH
, packsswh
);
3979 LMI_HELPER(PACKSSHB
, packsshb
);
3980 LMI_HELPER(PACKUSHB
, packushb
);
3982 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3983 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3984 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3985 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3986 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3987 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3989 LMI_HELPER(PAVGH
, pavgh
);
3990 LMI_HELPER(PAVGB
, pavgb
);
3991 LMI_HELPER(PMAXSH
, pmaxsh
);
3992 LMI_HELPER(PMINSH
, pminsh
);
3993 LMI_HELPER(PMAXUB
, pmaxub
);
3994 LMI_HELPER(PMINUB
, pminub
);
3996 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3997 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3998 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3999 LMI_HELPER(PCMPGTH
, pcmpgth
);
4000 LMI_HELPER(PCMPEQB
, pcmpeqb
);
4001 LMI_HELPER(PCMPGTB
, pcmpgtb
);
4003 LMI_HELPER(PSLLW
, psllw
);
4004 LMI_HELPER(PSLLH
, psllh
);
4005 LMI_HELPER(PSRLW
, psrlw
);
4006 LMI_HELPER(PSRLH
, psrlh
);
4007 LMI_HELPER(PSRAW
, psraw
);
4008 LMI_HELPER(PSRAH
, psrah
);
4010 LMI_HELPER(PMULLH
, pmullh
);
4011 LMI_HELPER(PMULHH
, pmulhh
);
4012 LMI_HELPER(PMULHUH
, pmulhuh
);
4013 LMI_HELPER(PMADDHW
, pmaddhw
);
4015 LMI_HELPER(PASUBUB
, pasubub
);
4016 LMI_HELPER_1(BIADD
, biadd
);
4017 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
4019 LMI_DIRECT(PADDD
, paddd
, add
);
4020 LMI_DIRECT(PSUBD
, psubd
, sub
);
4021 LMI_DIRECT(XOR_CP2
, xor, xor);
4022 LMI_DIRECT(NOR_CP2
, nor
, nor
);
4023 LMI_DIRECT(AND_CP2
, and, and);
4024 LMI_DIRECT(OR_CP2
, or, or);
4027 tcg_gen_andc_i64(t0
, t1
, t0
);
4031 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4034 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4037 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4040 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4044 tcg_gen_andi_i64(t1
, t1
, 3);
4045 tcg_gen_shli_i64(t1
, t1
, 4);
4046 tcg_gen_shr_i64(t0
, t0
, t1
);
4047 tcg_gen_ext16u_i64(t0
, t0
);
4051 tcg_gen_add_i64(t0
, t0
, t1
);
4052 tcg_gen_ext32s_i64(t0
, t0
);
4055 tcg_gen_sub_i64(t0
, t0
, t1
);
4056 tcg_gen_ext32s_i64(t0
, t0
);
4078 /* Make sure shift count isn't TCG undefined behaviour. */
4079 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4084 tcg_gen_shl_i64(t0
, t0
, t1
);
4088 /* Since SRA is UndefinedResult without sign-extended inputs,
4089 we can treat SRA and DSRA the same. */
4090 tcg_gen_sar_i64(t0
, t0
, t1
);
4093 /* We want to shift in zeros for SRL; zero-extend first. */
4094 tcg_gen_ext32u_i64(t0
, t0
);
4097 tcg_gen_shr_i64(t0
, t0
, t1
);
4101 if (shift_max
== 32) {
4102 tcg_gen_ext32s_i64(t0
, t0
);
4105 /* Shifts larger than MAX produce zero. */
4106 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4107 tcg_gen_neg_i64(t1
, t1
);
4108 tcg_gen_and_i64(t0
, t0
, t1
);
4114 TCGv_i64 t2
= tcg_temp_new_i64();
4115 TCGLabel
*lab
= gen_new_label();
4117 tcg_gen_mov_i64(t2
, t0
);
4118 tcg_gen_add_i64(t0
, t1
, t2
);
4119 if (opc
== OPC_ADD_CP2
) {
4120 tcg_gen_ext32s_i64(t0
, t0
);
4122 tcg_gen_xor_i64(t1
, t1
, t2
);
4123 tcg_gen_xor_i64(t2
, t2
, t0
);
4124 tcg_gen_andc_i64(t1
, t2
, t1
);
4125 tcg_temp_free_i64(t2
);
4126 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4127 generate_exception(ctx
, EXCP_OVERFLOW
);
4135 TCGv_i64 t2
= tcg_temp_new_i64();
4136 TCGLabel
*lab
= gen_new_label();
4138 tcg_gen_mov_i64(t2
, t0
);
4139 tcg_gen_sub_i64(t0
, t1
, t2
);
4140 if (opc
== OPC_SUB_CP2
) {
4141 tcg_gen_ext32s_i64(t0
, t0
);
4143 tcg_gen_xor_i64(t1
, t1
, t2
);
4144 tcg_gen_xor_i64(t2
, t2
, t0
);
4145 tcg_gen_and_i64(t1
, t1
, t2
);
4146 tcg_temp_free_i64(t2
);
4147 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4148 generate_exception(ctx
, EXCP_OVERFLOW
);
4154 tcg_gen_ext32u_i64(t0
, t0
);
4155 tcg_gen_ext32u_i64(t1
, t1
);
4156 tcg_gen_mul_i64(t0
, t0
, t1
);
4165 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4166 FD field is the CC field? */
4168 MIPS_INVAL("loongson_cp2");
4169 generate_exception_end(ctx
, EXCP_RI
);
4176 gen_store_fpr64(ctx
, t0
, rd
);
4178 tcg_temp_free_i64(t0
);
4179 tcg_temp_free_i64(t1
);
4183 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4184 int rs
, int rt
, int16_t imm
)
4187 TCGv t0
= tcg_temp_new();
4188 TCGv t1
= tcg_temp_new();
4191 /* Load needed operands */
4199 /* Compare two registers */
4201 gen_load_gpr(t0
, rs
);
4202 gen_load_gpr(t1
, rt
);
4212 /* Compare register to immediate */
4213 if (rs
!= 0 || imm
!= 0) {
4214 gen_load_gpr(t0
, rs
);
4215 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4222 case OPC_TEQ
: /* rs == rs */
4223 case OPC_TEQI
: /* r0 == 0 */
4224 case OPC_TGE
: /* rs >= rs */
4225 case OPC_TGEI
: /* r0 >= 0 */
4226 case OPC_TGEU
: /* rs >= rs unsigned */
4227 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4229 generate_exception_end(ctx
, EXCP_TRAP
);
4231 case OPC_TLT
: /* rs < rs */
4232 case OPC_TLTI
: /* r0 < 0 */
4233 case OPC_TLTU
: /* rs < rs unsigned */
4234 case OPC_TLTIU
: /* r0 < 0 unsigned */
4235 case OPC_TNE
: /* rs != rs */
4236 case OPC_TNEI
: /* r0 != 0 */
4237 /* Never trap: treat as NOP. */
4241 TCGLabel
*l1
= gen_new_label();
4246 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4250 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4254 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4258 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4262 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4266 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4269 generate_exception(ctx
, EXCP_TRAP
);
4276 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
4278 if (unlikely(ctx
->base
.singlestep_enabled
)) {
4282 #ifndef CONFIG_USER_ONLY
4283 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
4289 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4291 if (use_goto_tb(ctx
, dest
)) {
4294 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4297 if (ctx
->base
.singlestep_enabled
) {
4298 save_cpu_state(ctx
, 0);
4299 gen_helper_raise_exception_debug(cpu_env
);
4301 tcg_gen_lookup_and_goto_ptr();
4305 /* Branches (before delay slot) */
4306 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4308 int rs
, int rt
, int32_t offset
,
4311 target_ulong btgt
= -1;
4313 int bcond_compute
= 0;
4314 TCGv t0
= tcg_temp_new();
4315 TCGv t1
= tcg_temp_new();
4317 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4318 #ifdef MIPS_DEBUG_DISAS
4319 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4320 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4322 generate_exception_end(ctx
, EXCP_RI
);
4326 /* Load needed operands */
4332 /* Compare two registers */
4334 gen_load_gpr(t0
, rs
);
4335 gen_load_gpr(t1
, rt
);
4338 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4352 /* Compare to zero */
4354 gen_load_gpr(t0
, rs
);
4357 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4360 #if defined(TARGET_MIPS64)
4362 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4364 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4367 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4372 /* Jump to immediate */
4373 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4378 /* Jump to register */
4379 if (offset
!= 0 && offset
!= 16) {
4380 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4381 others are reserved. */
4382 MIPS_INVAL("jump hint");
4383 generate_exception_end(ctx
, EXCP_RI
);
4386 gen_load_gpr(btarget
, rs
);
4389 MIPS_INVAL("branch/jump");
4390 generate_exception_end(ctx
, EXCP_RI
);
4393 if (bcond_compute
== 0) {
4394 /* No condition to be computed */
4396 case OPC_BEQ
: /* rx == rx */
4397 case OPC_BEQL
: /* rx == rx likely */
4398 case OPC_BGEZ
: /* 0 >= 0 */
4399 case OPC_BGEZL
: /* 0 >= 0 likely */
4400 case OPC_BLEZ
: /* 0 <= 0 */
4401 case OPC_BLEZL
: /* 0 <= 0 likely */
4403 ctx
->hflags
|= MIPS_HFLAG_B
;
4405 case OPC_BGEZAL
: /* 0 >= 0 */
4406 case OPC_BGEZALL
: /* 0 >= 0 likely */
4407 /* Always take and link */
4409 ctx
->hflags
|= MIPS_HFLAG_B
;
4411 case OPC_BNE
: /* rx != rx */
4412 case OPC_BGTZ
: /* 0 > 0 */
4413 case OPC_BLTZ
: /* 0 < 0 */
4416 case OPC_BLTZAL
: /* 0 < 0 */
4417 /* Handle as an unconditional branch to get correct delay
4420 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4421 ctx
->hflags
|= MIPS_HFLAG_B
;
4423 case OPC_BLTZALL
: /* 0 < 0 likely */
4424 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4425 /* Skip the instruction in the delay slot */
4426 ctx
->base
.pc_next
+= 4;
4428 case OPC_BNEL
: /* rx != rx likely */
4429 case OPC_BGTZL
: /* 0 > 0 likely */
4430 case OPC_BLTZL
: /* 0 < 0 likely */
4431 /* Skip the instruction in the delay slot */
4432 ctx
->base
.pc_next
+= 4;
4435 ctx
->hflags
|= MIPS_HFLAG_B
;
4438 ctx
->hflags
|= MIPS_HFLAG_BX
;
4442 ctx
->hflags
|= MIPS_HFLAG_B
;
4445 ctx
->hflags
|= MIPS_HFLAG_BR
;
4449 ctx
->hflags
|= MIPS_HFLAG_BR
;
4452 MIPS_INVAL("branch/jump");
4453 generate_exception_end(ctx
, EXCP_RI
);
4459 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4462 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4465 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4468 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4471 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4474 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4477 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4481 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4485 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4488 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4491 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4494 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4497 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4500 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4503 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4505 #if defined(TARGET_MIPS64)
4507 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4511 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4514 ctx
->hflags
|= MIPS_HFLAG_BC
;
4517 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4520 ctx
->hflags
|= MIPS_HFLAG_BL
;
4523 MIPS_INVAL("conditional branch/jump");
4524 generate_exception_end(ctx
, EXCP_RI
);
4529 ctx
->btarget
= btgt
;
4531 switch (delayslot_size
) {
4533 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4536 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4541 int post_delay
= insn_bytes
+ delayslot_size
;
4542 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4544 tcg_gen_movi_tl(cpu_gpr
[blink
],
4545 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
4549 if (insn_bytes
== 2)
4550 ctx
->hflags
|= MIPS_HFLAG_B16
;
4555 /* special3 bitfield operations */
4556 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4557 int rs
, int lsb
, int msb
)
4559 TCGv t0
= tcg_temp_new();
4560 TCGv t1
= tcg_temp_new();
4562 gen_load_gpr(t1
, rs
);
4565 if (lsb
+ msb
> 31) {
4569 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4571 /* The two checks together imply that lsb == 0,
4572 so this is a simple sign-extension. */
4573 tcg_gen_ext32s_tl(t0
, t1
);
4576 #if defined(TARGET_MIPS64)
4585 if (lsb
+ msb
> 63) {
4588 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4595 gen_load_gpr(t0
, rt
);
4596 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4597 tcg_gen_ext32s_tl(t0
, t0
);
4599 #if defined(TARGET_MIPS64)
4610 gen_load_gpr(t0
, rt
);
4611 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4616 MIPS_INVAL("bitops");
4617 generate_exception_end(ctx
, EXCP_RI
);
4622 gen_store_gpr(t0
, rt
);
4627 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4632 /* If no destination, treat it as a NOP. */
4636 t0
= tcg_temp_new();
4637 gen_load_gpr(t0
, rt
);
4641 TCGv t1
= tcg_temp_new();
4642 TCGv t2
= tcg_const_tl(0x00FF00FF);
4644 tcg_gen_shri_tl(t1
, t0
, 8);
4645 tcg_gen_and_tl(t1
, t1
, t2
);
4646 tcg_gen_and_tl(t0
, t0
, t2
);
4647 tcg_gen_shli_tl(t0
, t0
, 8);
4648 tcg_gen_or_tl(t0
, t0
, t1
);
4651 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4655 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4658 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4660 #if defined(TARGET_MIPS64)
4663 TCGv t1
= tcg_temp_new();
4664 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
4666 tcg_gen_shri_tl(t1
, t0
, 8);
4667 tcg_gen_and_tl(t1
, t1
, t2
);
4668 tcg_gen_and_tl(t0
, t0
, t2
);
4669 tcg_gen_shli_tl(t0
, t0
, 8);
4670 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4677 TCGv t1
= tcg_temp_new();
4678 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
4680 tcg_gen_shri_tl(t1
, t0
, 16);
4681 tcg_gen_and_tl(t1
, t1
, t2
);
4682 tcg_gen_and_tl(t0
, t0
, t2
);
4683 tcg_gen_shli_tl(t0
, t0
, 16);
4684 tcg_gen_or_tl(t0
, t0
, t1
);
4685 tcg_gen_shri_tl(t1
, t0
, 32);
4686 tcg_gen_shli_tl(t0
, t0
, 32);
4687 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4694 MIPS_INVAL("bsfhl");
4695 generate_exception_end(ctx
, EXCP_RI
);
4702 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4711 t0
= tcg_temp_new();
4712 t1
= tcg_temp_new();
4713 gen_load_gpr(t0
, rs
);
4714 gen_load_gpr(t1
, rt
);
4715 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4716 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4717 if (opc
== OPC_LSA
) {
4718 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4727 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4735 t0
= tcg_temp_new();
4736 gen_load_gpr(t0
, rt
);
4740 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4742 #if defined(TARGET_MIPS64)
4744 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4749 TCGv t1
= tcg_temp_new();
4750 gen_load_gpr(t1
, rs
);
4754 TCGv_i64 t2
= tcg_temp_new_i64();
4755 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4756 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4757 gen_move_low32(cpu_gpr
[rd
], t2
);
4758 tcg_temp_free_i64(t2
);
4761 #if defined(TARGET_MIPS64)
4763 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4764 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4765 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4775 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4782 t0
= tcg_temp_new();
4783 gen_load_gpr(t0
, rt
);
4786 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4788 #if defined(TARGET_MIPS64)
4790 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4797 #ifndef CONFIG_USER_ONLY
4798 /* CP0 (MMU and control) */
4799 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4801 TCGv_i64 t0
= tcg_temp_new_i64();
4802 TCGv_i64 t1
= tcg_temp_new_i64();
4804 tcg_gen_ext_tl_i64(t0
, arg
);
4805 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4806 #if defined(TARGET_MIPS64)
4807 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4809 tcg_gen_concat32_i64(t1
, t1
, t0
);
4811 tcg_gen_st_i64(t1
, cpu_env
, off
);
4812 tcg_temp_free_i64(t1
);
4813 tcg_temp_free_i64(t0
);
4816 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4818 TCGv_i64 t0
= tcg_temp_new_i64();
4819 TCGv_i64 t1
= tcg_temp_new_i64();
4821 tcg_gen_ext_tl_i64(t0
, arg
);
4822 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4823 tcg_gen_concat32_i64(t1
, t1
, t0
);
4824 tcg_gen_st_i64(t1
, cpu_env
, off
);
4825 tcg_temp_free_i64(t1
);
4826 tcg_temp_free_i64(t0
);
4829 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4831 TCGv_i64 t0
= tcg_temp_new_i64();
4833 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4834 #if defined(TARGET_MIPS64)
4835 tcg_gen_shri_i64(t0
, t0
, 30);
4837 tcg_gen_shri_i64(t0
, t0
, 32);
4839 gen_move_low32(arg
, t0
);
4840 tcg_temp_free_i64(t0
);
4843 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4845 TCGv_i64 t0
= tcg_temp_new_i64();
4847 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4848 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4849 gen_move_low32(arg
, t0
);
4850 tcg_temp_free_i64(t0
);
4853 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4855 TCGv_i32 t0
= tcg_temp_new_i32();
4857 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4858 tcg_gen_ext_i32_tl(arg
, t0
);
4859 tcg_temp_free_i32(t0
);
4862 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4864 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4865 tcg_gen_ext32s_tl(arg
, arg
);
4868 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4870 TCGv_i32 t0
= tcg_temp_new_i32();
4872 tcg_gen_trunc_tl_i32(t0
, arg
);
4873 tcg_gen_st_i32(t0
, cpu_env
, off
);
4874 tcg_temp_free_i32(t0
);
/* Bail out to the function-local cp0_unimplemented label when the
   feature gate @c is not satisfied. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4884 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4886 const char *rn
= "invalid";
4888 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4894 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4898 goto cp0_unimplemented
;
4904 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4908 goto cp0_unimplemented
;
4914 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4915 ctx
->CP0_LLAddr_shift
);
4919 CP0_CHECK(ctx
->mrp
);
4920 gen_helper_mfhc0_maar(arg
, cpu_env
);
4924 goto cp0_unimplemented
;
4933 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4937 goto cp0_unimplemented
;
4941 goto cp0_unimplemented
;
4943 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4947 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4948 tcg_gen_movi_tl(arg
, 0);
4951 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4953 const char *rn
= "invalid";
4954 uint64_t mask
= ctx
->PAMask
>> 36;
4956 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4962 tcg_gen_andi_tl(arg
, arg
, mask
);
4963 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4967 goto cp0_unimplemented
;
4973 tcg_gen_andi_tl(arg
, arg
, mask
);
4974 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4978 goto cp0_unimplemented
;
4984 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4985 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4986 relevant for modern MIPS cores supporting MTHC0, therefore
4987 treating MTHC0 to LLAddr as NOP. */
4991 CP0_CHECK(ctx
->mrp
);
4992 gen_helper_mthc0_maar(cpu_env
, arg
);
4996 goto cp0_unimplemented
;
5005 tcg_gen_andi_tl(arg
, arg
, mask
);
5006 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5010 goto cp0_unimplemented
;
5014 goto cp0_unimplemented
;
5016 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5019 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5022 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5024 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5025 tcg_gen_movi_tl(arg
, 0);
5027 tcg_gen_movi_tl(arg
, ~0);
5031 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5033 const char *rn
= "invalid";
5036 check_insn(ctx
, ISA_MIPS32
);
5042 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5046 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5047 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5051 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5052 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5056 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5057 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5062 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5066 goto cp0_unimplemented
;
5072 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5073 gen_helper_mfc0_random(arg
, cpu_env
);
5077 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5078 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5082 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5083 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5087 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5088 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5092 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5093 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5097 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5098 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5102 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5103 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5104 rn
= "VPEScheFBack";
5107 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5112 goto cp0_unimplemented
;
5119 TCGv_i64 tmp
= tcg_temp_new_i64();
5120 tcg_gen_ld_i64(tmp
, cpu_env
,
5121 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5122 #if defined(TARGET_MIPS64)
5124 /* Move RI/XI fields to bits 31:30 */
5125 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5126 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5129 gen_move_low32(arg
, tmp
);
5130 tcg_temp_free_i64(tmp
);
5135 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5136 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5140 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5141 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5145 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5146 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5150 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5151 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5155 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5156 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5160 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5161 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5165 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5166 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5170 goto cp0_unimplemented
;
5177 TCGv_i64 tmp
= tcg_temp_new_i64();
5178 tcg_gen_ld_i64(tmp
, cpu_env
,
5179 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5180 #if defined(TARGET_MIPS64)
5182 /* Move RI/XI fields to bits 31:30 */
5183 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5184 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5187 gen_move_low32(arg
, tmp
);
5188 tcg_temp_free_i64(tmp
);
5194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5195 rn
= "GlobalNumber";
5198 goto cp0_unimplemented
;
5204 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5205 tcg_gen_ext32s_tl(arg
, arg
);
5209 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5210 rn
= "ContextConfig";
5211 goto cp0_unimplemented
;
5213 CP0_CHECK(ctx
->ulri
);
5214 tcg_gen_ld_tl(arg
, cpu_env
,
5215 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5216 tcg_gen_ext32s_tl(arg
, arg
);
5220 goto cp0_unimplemented
;
5226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5230 check_insn(ctx
, ISA_MIPS32R2
);
5231 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5236 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5237 tcg_gen_ext32s_tl(arg
, arg
);
5242 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5243 tcg_gen_ext32s_tl(arg
, arg
);
5248 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5249 tcg_gen_ext32s_tl(arg
, arg
);
5253 goto cp0_unimplemented
;
5259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5263 check_insn(ctx
, ISA_MIPS32R2
);
5264 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5268 check_insn(ctx
, ISA_MIPS32R2
);
5269 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5273 check_insn(ctx
, ISA_MIPS32R2
);
5274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5278 check_insn(ctx
, ISA_MIPS32R2
);
5279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5283 check_insn(ctx
, ISA_MIPS32R2
);
5284 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5288 goto cp0_unimplemented
;
5294 check_insn(ctx
, ISA_MIPS32R2
);
5295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5299 goto cp0_unimplemented
;
5305 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5306 tcg_gen_ext32s_tl(arg
, arg
);
5311 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5320 goto cp0_unimplemented
;
5326 /* Mark as an IO operation because we read the time. */
5327 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5330 gen_helper_mfc0_count(arg
, cpu_env
);
5331 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5334 /* Break the TB to be able to take timer interrupts immediately
5335 after reading count. DISAS_STOP isn't sufficient, we need to
5336 ensure we break completely out of translated code. */
5337 gen_save_pc(ctx
->base
.pc_next
+ 4);
5338 ctx
->base
.is_jmp
= DISAS_EXIT
;
5341 /* 6,7 are implementation dependent */
5343 goto cp0_unimplemented
;
5349 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5350 tcg_gen_ext32s_tl(arg
, arg
);
5354 goto cp0_unimplemented
;
5360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5363 /* 6,7 are implementation dependent */
5365 goto cp0_unimplemented
;
5371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5375 check_insn(ctx
, ISA_MIPS32R2
);
5376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5380 check_insn(ctx
, ISA_MIPS32R2
);
5381 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5385 check_insn(ctx
, ISA_MIPS32R2
);
5386 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5390 goto cp0_unimplemented
;
5396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5400 goto cp0_unimplemented
;
5406 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5407 tcg_gen_ext32s_tl(arg
, arg
);
5411 goto cp0_unimplemented
;
5417 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5421 check_insn(ctx
, ISA_MIPS32R2
);
5422 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5423 tcg_gen_ext32s_tl(arg
, arg
);
5427 check_insn(ctx
, ISA_MIPS32R2
);
5428 CP0_CHECK(ctx
->cmgcr
);
5429 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5430 tcg_gen_ext32s_tl(arg
, arg
);
5434 goto cp0_unimplemented
;
5440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5444 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5448 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5463 /* 6,7 are implementation dependent */
5465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5473 goto cp0_unimplemented
;
5479 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5483 CP0_CHECK(ctx
->mrp
);
5484 gen_helper_mfc0_maar(arg
, cpu_env
);
5488 CP0_CHECK(ctx
->mrp
);
5489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5493 goto cp0_unimplemented
;
5499 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5503 goto cp0_unimplemented
;
5509 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5513 goto cp0_unimplemented
;
5519 #if defined(TARGET_MIPS64)
5520 check_insn(ctx
, ISA_MIPS3
);
5521 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5522 tcg_gen_ext32s_tl(arg
, arg
);
5527 goto cp0_unimplemented
;
5531 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5532 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5539 goto cp0_unimplemented
;
5543 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5544 rn
= "'Diagnostic"; /* implementation dependent */
5549 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5553 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5554 rn
= "TraceControl";
5555 goto cp0_unimplemented
;
5557 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5558 rn
= "TraceControl2";
5559 goto cp0_unimplemented
;
5561 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5562 rn
= "UserTraceData";
5563 goto cp0_unimplemented
;
5565 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5567 goto cp0_unimplemented
;
5569 goto cp0_unimplemented
;
5576 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5577 tcg_gen_ext32s_tl(arg
, arg
);
5581 goto cp0_unimplemented
;
5587 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5588 rn
= "Performance0";
5591 // gen_helper_mfc0_performance1(arg);
5592 rn
= "Performance1";
5593 goto cp0_unimplemented
;
5595 // gen_helper_mfc0_performance2(arg);
5596 rn
= "Performance2";
5597 goto cp0_unimplemented
;
5599 // gen_helper_mfc0_performance3(arg);
5600 rn
= "Performance3";
5601 goto cp0_unimplemented
;
5603 // gen_helper_mfc0_performance4(arg);
5604 rn
= "Performance4";
5605 goto cp0_unimplemented
;
5607 // gen_helper_mfc0_performance5(arg);
5608 rn
= "Performance5";
5609 goto cp0_unimplemented
;
5611 // gen_helper_mfc0_performance6(arg);
5612 rn
= "Performance6";
5613 goto cp0_unimplemented
;
5615 // gen_helper_mfc0_performance7(arg);
5616 rn
= "Performance7";
5617 goto cp0_unimplemented
;
5619 goto cp0_unimplemented
;
5625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5629 goto cp0_unimplemented
;
5635 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5639 goto cp0_unimplemented
;
5649 TCGv_i64 tmp
= tcg_temp_new_i64();
5650 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5651 gen_move_low32(arg
, tmp
);
5652 tcg_temp_free_i64(tmp
);
5660 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5664 goto cp0_unimplemented
;
5673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5684 goto cp0_unimplemented
;
5690 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5691 tcg_gen_ext32s_tl(arg
, arg
);
5695 goto cp0_unimplemented
;
5702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5706 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5707 tcg_gen_ld_tl(arg
, cpu_env
,
5708 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5709 tcg_gen_ext32s_tl(arg
, arg
);
5713 goto cp0_unimplemented
;
5717 goto cp0_unimplemented
;
5719 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5723 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5724 gen_mfc0_unimplemented(ctx
, arg
);
5727 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5729 const char *rn
= "invalid";
5732 check_insn(ctx
, ISA_MIPS32
);
5734 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5742 gen_helper_mtc0_index(cpu_env
, arg
);
5746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5747 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5756 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5766 goto cp0_unimplemented
;
5776 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5777 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5781 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5782 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5786 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5787 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5791 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5792 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5796 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5797 tcg_gen_st_tl(arg
, cpu_env
,
5798 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5802 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5803 tcg_gen_st_tl(arg
, cpu_env
,
5804 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5805 rn
= "VPEScheFBack";
5808 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5809 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5813 goto cp0_unimplemented
;
5819 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5823 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5824 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5828 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5829 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5833 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5834 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5838 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5839 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5843 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5844 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5849 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5854 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5858 goto cp0_unimplemented
;
5864 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5870 rn
= "GlobalNumber";
5873 goto cp0_unimplemented
;
5879 gen_helper_mtc0_context(cpu_env
, arg
);
5883 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5884 rn
= "ContextConfig";
5885 goto cp0_unimplemented
;
5887 CP0_CHECK(ctx
->ulri
);
5888 tcg_gen_st_tl(arg
, cpu_env
,
5889 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5893 goto cp0_unimplemented
;
5899 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5903 check_insn(ctx
, ISA_MIPS32R2
);
5904 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5906 ctx
->base
.is_jmp
= DISAS_STOP
;
5910 gen_helper_mtc0_segctl0(cpu_env
, arg
);
5915 gen_helper_mtc0_segctl1(cpu_env
, arg
);
5920 gen_helper_mtc0_segctl2(cpu_env
, arg
);
5924 goto cp0_unimplemented
;
5930 gen_helper_mtc0_wired(cpu_env
, arg
);
5934 check_insn(ctx
, ISA_MIPS32R2
);
5935 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5939 check_insn(ctx
, ISA_MIPS32R2
);
5940 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5944 check_insn(ctx
, ISA_MIPS32R2
);
5945 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5949 check_insn(ctx
, ISA_MIPS32R2
);
5950 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5954 check_insn(ctx
, ISA_MIPS32R2
);
5955 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5959 goto cp0_unimplemented
;
5965 check_insn(ctx
, ISA_MIPS32R2
);
5966 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5967 ctx
->base
.is_jmp
= DISAS_STOP
;
5971 goto cp0_unimplemented
;
5989 goto cp0_unimplemented
;
5995 gen_helper_mtc0_count(cpu_env
, arg
);
5998 /* 6,7 are implementation dependent */
6000 goto cp0_unimplemented
;
6006 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6010 goto cp0_unimplemented
;
6016 gen_helper_mtc0_compare(cpu_env
, arg
);
6019 /* 6,7 are implementation dependent */
6021 goto cp0_unimplemented
;
6027 save_cpu_state(ctx
, 1);
6028 gen_helper_mtc0_status(cpu_env
, arg
);
6029 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6030 gen_save_pc(ctx
->base
.pc_next
+ 4);
6031 ctx
->base
.is_jmp
= DISAS_EXIT
;
6035 check_insn(ctx
, ISA_MIPS32R2
);
6036 gen_helper_mtc0_intctl(cpu_env
, arg
);
6037 /* Stop translation as we may have switched the execution mode */
6038 ctx
->base
.is_jmp
= DISAS_STOP
;
6042 check_insn(ctx
, ISA_MIPS32R2
);
6043 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6044 /* Stop translation as we may have switched the execution mode */
6045 ctx
->base
.is_jmp
= DISAS_STOP
;
6049 check_insn(ctx
, ISA_MIPS32R2
);
6050 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6051 /* Stop translation as we may have switched the execution mode */
6052 ctx
->base
.is_jmp
= DISAS_STOP
;
6056 goto cp0_unimplemented
;
6062 save_cpu_state(ctx
, 1);
6063 gen_helper_mtc0_cause(cpu_env
, arg
);
6064 /* Stop translation as we may have triggered an interrupt.
6065 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6066 * translated code to check for pending interrupts. */
6067 gen_save_pc(ctx
->base
.pc_next
+ 4);
6068 ctx
->base
.is_jmp
= DISAS_EXIT
;
6072 goto cp0_unimplemented
;
6078 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6082 goto cp0_unimplemented
;
6092 check_insn(ctx
, ISA_MIPS32R2
);
6093 gen_helper_mtc0_ebase(cpu_env
, arg
);
6097 goto cp0_unimplemented
;
6103 gen_helper_mtc0_config0(cpu_env
, arg
);
6105 /* Stop translation as we may have switched the execution mode */
6106 ctx
->base
.is_jmp
= DISAS_STOP
;
6109 /* ignored, read only */
6113 gen_helper_mtc0_config2(cpu_env
, arg
);
6115 /* Stop translation as we may have switched the execution mode */
6116 ctx
->base
.is_jmp
= DISAS_STOP
;
6119 gen_helper_mtc0_config3(cpu_env
, arg
);
6121 /* Stop translation as we may have switched the execution mode */
6122 ctx
->base
.is_jmp
= DISAS_STOP
;
6125 gen_helper_mtc0_config4(cpu_env
, arg
);
6127 ctx
->base
.is_jmp
= DISAS_STOP
;
6130 gen_helper_mtc0_config5(cpu_env
, arg
);
6132 /* Stop translation as we may have switched the execution mode */
6133 ctx
->base
.is_jmp
= DISAS_STOP
;
6135 /* 6,7 are implementation dependent */
6145 rn
= "Invalid config selector";
6146 goto cp0_unimplemented
;
6152 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6156 CP0_CHECK(ctx
->mrp
);
6157 gen_helper_mtc0_maar(cpu_env
, arg
);
6161 CP0_CHECK(ctx
->mrp
);
6162 gen_helper_mtc0_maari(cpu_env
, arg
);
6166 goto cp0_unimplemented
;
6172 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6176 goto cp0_unimplemented
;
6182 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6186 goto cp0_unimplemented
;
6192 #if defined(TARGET_MIPS64)
6193 check_insn(ctx
, ISA_MIPS3
);
6194 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6199 goto cp0_unimplemented
;
6203 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6204 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6207 gen_helper_mtc0_framemask(cpu_env
, arg
);
6211 goto cp0_unimplemented
;
6216 rn
= "Diagnostic"; /* implementation dependent */
6221 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6222 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6223 gen_save_pc(ctx
->base
.pc_next
+ 4);
6224 ctx
->base
.is_jmp
= DISAS_EXIT
;
6228 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6229 rn
= "TraceControl";
6230 /* Stop translation as we may have switched the execution mode */
6231 ctx
->base
.is_jmp
= DISAS_STOP
;
6232 goto cp0_unimplemented
;
6234 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6235 rn
= "TraceControl2";
6236 /* Stop translation as we may have switched the execution mode */
6237 ctx
->base
.is_jmp
= DISAS_STOP
;
6238 goto cp0_unimplemented
;
6240 /* Stop translation as we may have switched the execution mode */
6241 ctx
->base
.is_jmp
= DISAS_STOP
;
6242 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6243 rn
= "UserTraceData";
6244 /* Stop translation as we may have switched the execution mode */
6245 ctx
->base
.is_jmp
= DISAS_STOP
;
6246 goto cp0_unimplemented
;
6248 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6249 /* Stop translation as we may have switched the execution mode */
6250 ctx
->base
.is_jmp
= DISAS_STOP
;
6252 goto cp0_unimplemented
;
6254 goto cp0_unimplemented
;
6261 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6265 goto cp0_unimplemented
;
6271 gen_helper_mtc0_performance0(cpu_env
, arg
);
6272 rn
= "Performance0";
6275 // gen_helper_mtc0_performance1(arg);
6276 rn
= "Performance1";
6277 goto cp0_unimplemented
;
6279 // gen_helper_mtc0_performance2(arg);
6280 rn
= "Performance2";
6281 goto cp0_unimplemented
;
6283 // gen_helper_mtc0_performance3(arg);
6284 rn
= "Performance3";
6285 goto cp0_unimplemented
;
6287 // gen_helper_mtc0_performance4(arg);
6288 rn
= "Performance4";
6289 goto cp0_unimplemented
;
6291 // gen_helper_mtc0_performance5(arg);
6292 rn
= "Performance5";
6293 goto cp0_unimplemented
;
6295 // gen_helper_mtc0_performance6(arg);
6296 rn
= "Performance6";
6297 goto cp0_unimplemented
;
6299 // gen_helper_mtc0_performance7(arg);
6300 rn
= "Performance7";
6301 goto cp0_unimplemented
;
6303 goto cp0_unimplemented
;
6309 gen_helper_mtc0_errctl(cpu_env
, arg
);
6310 ctx
->base
.is_jmp
= DISAS_STOP
;
6314 goto cp0_unimplemented
;
6324 goto cp0_unimplemented
;
6333 gen_helper_mtc0_taglo(cpu_env
, arg
);
6340 gen_helper_mtc0_datalo(cpu_env
, arg
);
6344 goto cp0_unimplemented
;
6353 gen_helper_mtc0_taghi(cpu_env
, arg
);
6360 gen_helper_mtc0_datahi(cpu_env
, arg
);
6365 goto cp0_unimplemented
;
6371 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6375 goto cp0_unimplemented
;
6382 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6386 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6387 tcg_gen_st_tl(arg
, cpu_env
,
6388 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6392 goto cp0_unimplemented
;
6396 goto cp0_unimplemented
;
6398 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6400 /* For simplicity assume that all writes can cause interrupts. */
6401 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6403 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6404 * translated code to check for pending interrupts. */
6405 gen_save_pc(ctx
->base
.pc_next
+ 4);
6406 ctx
->base
.is_jmp
= DISAS_EXIT
;
6411 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6414 #if defined(TARGET_MIPS64)
6415 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6417 const char *rn
= "invalid";
6420 check_insn(ctx
, ISA_MIPS64
);
6426 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6430 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6431 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6435 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6436 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6440 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6441 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6446 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6450 goto cp0_unimplemented
;
6456 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6457 gen_helper_mfc0_random(arg
, cpu_env
);
6461 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6462 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6466 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6467 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6471 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6472 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6476 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6477 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6481 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6482 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6486 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6487 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6488 rn
= "VPEScheFBack";
6491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6496 goto cp0_unimplemented
;
6502 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6506 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6507 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6512 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6516 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6517 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6521 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6522 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6526 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6527 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6531 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6532 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6536 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6537 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6541 goto cp0_unimplemented
;
6547 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6553 rn
= "GlobalNumber";
6556 goto cp0_unimplemented
;
6562 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6566 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6567 rn
= "ContextConfig";
6568 goto cp0_unimplemented
;
6570 CP0_CHECK(ctx
->ulri
);
6571 tcg_gen_ld_tl(arg
, cpu_env
,
6572 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6576 goto cp0_unimplemented
;
6582 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6586 check_insn(ctx
, ISA_MIPS32R2
);
6587 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6592 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6597 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6602 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6606 goto cp0_unimplemented
;
6612 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6616 check_insn(ctx
, ISA_MIPS32R2
);
6617 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6621 check_insn(ctx
, ISA_MIPS32R2
);
6622 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6626 check_insn(ctx
, ISA_MIPS32R2
);
6627 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6631 check_insn(ctx
, ISA_MIPS32R2
);
6632 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6636 check_insn(ctx
, ISA_MIPS32R2
);
6637 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6641 goto cp0_unimplemented
;
6647 check_insn(ctx
, ISA_MIPS32R2
);
6648 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6652 goto cp0_unimplemented
;
6658 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6663 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6668 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6672 goto cp0_unimplemented
;
6678 /* Mark as an IO operation because we read the time. */
6679 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6682 gen_helper_mfc0_count(arg
, cpu_env
);
6683 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6686 /* Break the TB to be able to take timer interrupts immediately
6687 after reading count. DISAS_STOP isn't sufficient, we need to
6688 ensure we break completely out of translated code. */
6689 gen_save_pc(ctx
->base
.pc_next
+ 4);
6690 ctx
->base
.is_jmp
= DISAS_EXIT
;
6693 /* 6,7 are implementation dependent */
6695 goto cp0_unimplemented
;
6701 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6705 goto cp0_unimplemented
;
6711 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6714 /* 6,7 are implementation dependent */
6716 goto cp0_unimplemented
;
6722 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6726 check_insn(ctx
, ISA_MIPS32R2
);
6727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6731 check_insn(ctx
, ISA_MIPS32R2
);
6732 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6736 check_insn(ctx
, ISA_MIPS32R2
);
6737 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6741 goto cp0_unimplemented
;
6747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6751 goto cp0_unimplemented
;
6757 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6761 goto cp0_unimplemented
;
6767 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6771 check_insn(ctx
, ISA_MIPS32R2
);
6772 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6776 check_insn(ctx
, ISA_MIPS32R2
);
6777 CP0_CHECK(ctx
->cmgcr
);
6778 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6782 goto cp0_unimplemented
;
6788 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6796 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6800 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6804 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6808 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6811 /* 6,7 are implementation dependent */
6813 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6817 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6821 goto cp0_unimplemented
;
6827 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6831 CP0_CHECK(ctx
->mrp
);
6832 gen_helper_dmfc0_maar(arg
, cpu_env
);
6836 CP0_CHECK(ctx
->mrp
);
6837 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6841 goto cp0_unimplemented
;
6847 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6851 goto cp0_unimplemented
;
6857 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6861 goto cp0_unimplemented
;
6867 check_insn(ctx
, ISA_MIPS3
);
6868 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6872 goto cp0_unimplemented
;
6876 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6877 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6880 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6884 goto cp0_unimplemented
;
6888 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6889 rn
= "'Diagnostic"; /* implementation dependent */
6894 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6898 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6899 rn
= "TraceControl";
6900 goto cp0_unimplemented
;
6902 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6903 rn
= "TraceControl2";
6904 goto cp0_unimplemented
;
6906 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6907 rn
= "UserTraceData";
6908 goto cp0_unimplemented
;
6910 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6912 goto cp0_unimplemented
;
6914 goto cp0_unimplemented
;
6921 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6925 goto cp0_unimplemented
;
6931 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6932 rn
= "Performance0";
6935 // gen_helper_dmfc0_performance1(arg);
6936 rn
= "Performance1";
6937 goto cp0_unimplemented
;
6939 // gen_helper_dmfc0_performance2(arg);
6940 rn
= "Performance2";
6941 goto cp0_unimplemented
;
6943 // gen_helper_dmfc0_performance3(arg);
6944 rn
= "Performance3";
6945 goto cp0_unimplemented
;
6947 // gen_helper_dmfc0_performance4(arg);
6948 rn
= "Performance4";
6949 goto cp0_unimplemented
;
6951 // gen_helper_dmfc0_performance5(arg);
6952 rn
= "Performance5";
6953 goto cp0_unimplemented
;
6955 // gen_helper_dmfc0_performance6(arg);
6956 rn
= "Performance6";
6957 goto cp0_unimplemented
;
6959 // gen_helper_dmfc0_performance7(arg);
6960 rn
= "Performance7";
6961 goto cp0_unimplemented
;
6963 goto cp0_unimplemented
;
6969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6973 goto cp0_unimplemented
;
6980 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6984 goto cp0_unimplemented
;
6993 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7000 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7004 goto cp0_unimplemented
;
7013 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7020 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7024 goto cp0_unimplemented
;
7030 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7034 goto cp0_unimplemented
;
7041 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7045 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7046 tcg_gen_ld_tl(arg
, cpu_env
,
7047 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7051 goto cp0_unimplemented
;
7055 goto cp0_unimplemented
;
7057 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
7061 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7062 gen_mfc0_unimplemented(ctx
, arg
);
7065 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7067 const char *rn
= "invalid";
7070 check_insn(ctx
, ISA_MIPS64
);
7072 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7080 gen_helper_mtc0_index(cpu_env
, arg
);
7084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7085 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7104 goto cp0_unimplemented
;
7114 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7115 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7119 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7120 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7124 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7125 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7129 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7130 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7134 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7135 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7139 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7140 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7141 rn
= "VPEScheFBack";
7144 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7145 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7149 goto cp0_unimplemented
;
7155 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7159 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7160 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7164 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7165 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7169 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7170 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7174 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7175 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7179 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7180 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7184 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7185 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7189 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7190 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7194 goto cp0_unimplemented
;
7200 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7206 rn
= "GlobalNumber";
7209 goto cp0_unimplemented
;
7215 gen_helper_mtc0_context(cpu_env
, arg
);
7219 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7220 rn
= "ContextConfig";
7221 goto cp0_unimplemented
;
7223 CP0_CHECK(ctx
->ulri
);
7224 tcg_gen_st_tl(arg
, cpu_env
,
7225 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7229 goto cp0_unimplemented
;
7235 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7239 check_insn(ctx
, ISA_MIPS32R2
);
7240 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7245 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7250 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7255 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7259 goto cp0_unimplemented
;
7265 gen_helper_mtc0_wired(cpu_env
, arg
);
7269 check_insn(ctx
, ISA_MIPS32R2
);
7270 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7274 check_insn(ctx
, ISA_MIPS32R2
);
7275 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7279 check_insn(ctx
, ISA_MIPS32R2
);
7280 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7284 check_insn(ctx
, ISA_MIPS32R2
);
7285 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7289 check_insn(ctx
, ISA_MIPS32R2
);
7290 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7294 goto cp0_unimplemented
;
7300 check_insn(ctx
, ISA_MIPS32R2
);
7301 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7302 ctx
->base
.is_jmp
= DISAS_STOP
;
7306 goto cp0_unimplemented
;
7324 goto cp0_unimplemented
;
7330 gen_helper_mtc0_count(cpu_env
, arg
);
7333 /* 6,7 are implementation dependent */
7335 goto cp0_unimplemented
;
7337 /* Stop translation as we may have switched the execution mode */
7338 ctx
->base
.is_jmp
= DISAS_STOP
;
7343 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7347 goto cp0_unimplemented
;
7353 gen_helper_mtc0_compare(cpu_env
, arg
);
7356 /* 6,7 are implementation dependent */
7358 goto cp0_unimplemented
;
7360 /* Stop translation as we may have switched the execution mode */
7361 ctx
->base
.is_jmp
= DISAS_STOP
;
7366 save_cpu_state(ctx
, 1);
7367 gen_helper_mtc0_status(cpu_env
, arg
);
7368 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7369 gen_save_pc(ctx
->base
.pc_next
+ 4);
7370 ctx
->base
.is_jmp
= DISAS_EXIT
;
7374 check_insn(ctx
, ISA_MIPS32R2
);
7375 gen_helper_mtc0_intctl(cpu_env
, arg
);
7376 /* Stop translation as we may have switched the execution mode */
7377 ctx
->base
.is_jmp
= DISAS_STOP
;
7381 check_insn(ctx
, ISA_MIPS32R2
);
7382 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7383 /* Stop translation as we may have switched the execution mode */
7384 ctx
->base
.is_jmp
= DISAS_STOP
;
7388 check_insn(ctx
, ISA_MIPS32R2
);
7389 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7390 /* Stop translation as we may have switched the execution mode */
7391 ctx
->base
.is_jmp
= DISAS_STOP
;
7395 goto cp0_unimplemented
;
7401 save_cpu_state(ctx
, 1);
7402 gen_helper_mtc0_cause(cpu_env
, arg
);
7403 /* Stop translation as we may have triggered an interrupt.
7404 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7405 * translated code to check for pending interrupts. */
7406 gen_save_pc(ctx
->base
.pc_next
+ 4);
7407 ctx
->base
.is_jmp
= DISAS_EXIT
;
7411 goto cp0_unimplemented
;
7417 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7421 goto cp0_unimplemented
;
7431 check_insn(ctx
, ISA_MIPS32R2
);
7432 gen_helper_mtc0_ebase(cpu_env
, arg
);
7436 goto cp0_unimplemented
;
7442 gen_helper_mtc0_config0(cpu_env
, arg
);
7444 /* Stop translation as we may have switched the execution mode */
7445 ctx
->base
.is_jmp
= DISAS_STOP
;
7448 /* ignored, read only */
7452 gen_helper_mtc0_config2(cpu_env
, arg
);
7454 /* Stop translation as we may have switched the execution mode */
7455 ctx
->base
.is_jmp
= DISAS_STOP
;
7458 gen_helper_mtc0_config3(cpu_env
, arg
);
7460 /* Stop translation as we may have switched the execution mode */
7461 ctx
->base
.is_jmp
= DISAS_STOP
;
7464 /* currently ignored */
7468 gen_helper_mtc0_config5(cpu_env
, arg
);
7470 /* Stop translation as we may have switched the execution mode */
7471 ctx
->base
.is_jmp
= DISAS_STOP
;
7473 /* 6,7 are implementation dependent */
7475 rn
= "Invalid config selector";
7476 goto cp0_unimplemented
;
7482 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7486 CP0_CHECK(ctx
->mrp
);
7487 gen_helper_mtc0_maar(cpu_env
, arg
);
7491 CP0_CHECK(ctx
->mrp
);
7492 gen_helper_mtc0_maari(cpu_env
, arg
);
7496 goto cp0_unimplemented
;
7502 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7506 goto cp0_unimplemented
;
7512 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7516 goto cp0_unimplemented
;
7522 check_insn(ctx
, ISA_MIPS3
);
7523 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7527 goto cp0_unimplemented
;
7531 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7532 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7535 gen_helper_mtc0_framemask(cpu_env
, arg
);
7539 goto cp0_unimplemented
;
7544 rn
= "Diagnostic"; /* implementation dependent */
7549 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7550 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7551 gen_save_pc(ctx
->base
.pc_next
+ 4);
7552 ctx
->base
.is_jmp
= DISAS_EXIT
;
7556 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7557 /* Stop translation as we may have switched the execution mode */
7558 ctx
->base
.is_jmp
= DISAS_STOP
;
7559 rn
= "TraceControl";
7560 goto cp0_unimplemented
;
7562 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7563 /* Stop translation as we may have switched the execution mode */
7564 ctx
->base
.is_jmp
= DISAS_STOP
;
7565 rn
= "TraceControl2";
7566 goto cp0_unimplemented
;
7568 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7569 /* Stop translation as we may have switched the execution mode */
7570 ctx
->base
.is_jmp
= DISAS_STOP
;
7571 rn
= "UserTraceData";
7572 goto cp0_unimplemented
;
7574 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7575 /* Stop translation as we may have switched the execution mode */
7576 ctx
->base
.is_jmp
= DISAS_STOP
;
7578 goto cp0_unimplemented
;
7580 goto cp0_unimplemented
;
7587 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7591 goto cp0_unimplemented
;
7597 gen_helper_mtc0_performance0(cpu_env
, arg
);
7598 rn
= "Performance0";
7601 // gen_helper_mtc0_performance1(cpu_env, arg);
7602 rn
= "Performance1";
7603 goto cp0_unimplemented
;
7605 // gen_helper_mtc0_performance2(cpu_env, arg);
7606 rn
= "Performance2";
7607 goto cp0_unimplemented
;
7609 // gen_helper_mtc0_performance3(cpu_env, arg);
7610 rn
= "Performance3";
7611 goto cp0_unimplemented
;
7613 // gen_helper_mtc0_performance4(cpu_env, arg);
7614 rn
= "Performance4";
7615 goto cp0_unimplemented
;
7617 // gen_helper_mtc0_performance5(cpu_env, arg);
7618 rn
= "Performance5";
7619 goto cp0_unimplemented
;
7621 // gen_helper_mtc0_performance6(cpu_env, arg);
7622 rn
= "Performance6";
7623 goto cp0_unimplemented
;
7625 // gen_helper_mtc0_performance7(cpu_env, arg);
7626 rn
= "Performance7";
7627 goto cp0_unimplemented
;
7629 goto cp0_unimplemented
;
7635 gen_helper_mtc0_errctl(cpu_env
, arg
);
7636 ctx
->base
.is_jmp
= DISAS_STOP
;
7640 goto cp0_unimplemented
;
7650 goto cp0_unimplemented
;
7659 gen_helper_mtc0_taglo(cpu_env
, arg
);
7666 gen_helper_mtc0_datalo(cpu_env
, arg
);
7670 goto cp0_unimplemented
;
7679 gen_helper_mtc0_taghi(cpu_env
, arg
);
7686 gen_helper_mtc0_datahi(cpu_env
, arg
);
7691 goto cp0_unimplemented
;
7697 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7701 goto cp0_unimplemented
;
7708 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7712 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7713 tcg_gen_st_tl(arg
, cpu_env
,
7714 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7718 goto cp0_unimplemented
;
7722 goto cp0_unimplemented
;
7724 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
7726 /* For simplicity assume that all writes can cause interrupts. */
7727 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7729 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7730 * translated code to check for pending interrupts. */
7731 gen_save_pc(ctx
->base
.pc_next
+ 4);
7732 ctx
->base
.is_jmp
= DISAS_EXIT
;
7737 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7739 #endif /* TARGET_MIPS64 */
7741 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7742 int u
, int sel
, int h
)
7744 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7745 TCGv t0
= tcg_temp_local_new();
7747 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7748 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7749 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7750 tcg_gen_movi_tl(t0
, -1);
7751 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7752 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7753 tcg_gen_movi_tl(t0
, -1);
7759 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7762 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7772 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7775 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7778 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7781 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7784 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7787 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7790 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7793 gen_mfc0(ctx
, t0
, rt
, sel
);
7800 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7803 gen_mfc0(ctx
, t0
, rt
, sel
);
7809 gen_helper_mftc0_status(t0
, cpu_env
);
7812 gen_mfc0(ctx
, t0
, rt
, sel
);
7818 gen_helper_mftc0_cause(t0
, cpu_env
);
7828 gen_helper_mftc0_epc(t0
, cpu_env
);
7838 gen_helper_mftc0_ebase(t0
, cpu_env
);
7848 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7858 gen_helper_mftc0_debug(t0
, cpu_env
);
7861 gen_mfc0(ctx
, t0
, rt
, sel
);
7866 gen_mfc0(ctx
, t0
, rt
, sel
);
7868 } else switch (sel
) {
7869 /* GPR registers. */
7871 gen_helper_1e0i(mftgpr
, t0
, rt
);
7873 /* Auxiliary CPU registers */
7877 gen_helper_1e0i(mftlo
, t0
, 0);
7880 gen_helper_1e0i(mfthi
, t0
, 0);
7883 gen_helper_1e0i(mftacx
, t0
, 0);
7886 gen_helper_1e0i(mftlo
, t0
, 1);
7889 gen_helper_1e0i(mfthi
, t0
, 1);
7892 gen_helper_1e0i(mftacx
, t0
, 1);
7895 gen_helper_1e0i(mftlo
, t0
, 2);
7898 gen_helper_1e0i(mfthi
, t0
, 2);
7901 gen_helper_1e0i(mftacx
, t0
, 2);
7904 gen_helper_1e0i(mftlo
, t0
, 3);
7907 gen_helper_1e0i(mfthi
, t0
, 3);
7910 gen_helper_1e0i(mftacx
, t0
, 3);
7913 gen_helper_mftdsp(t0
, cpu_env
);
7919 /* Floating point (COP1). */
7921 /* XXX: For now we support only a single FPU context. */
7923 TCGv_i32 fp0
= tcg_temp_new_i32();
7925 gen_load_fpr32(ctx
, fp0
, rt
);
7926 tcg_gen_ext_i32_tl(t0
, fp0
);
7927 tcg_temp_free_i32(fp0
);
7929 TCGv_i32 fp0
= tcg_temp_new_i32();
7931 gen_load_fpr32h(ctx
, fp0
, rt
);
7932 tcg_gen_ext_i32_tl(t0
, fp0
);
7933 tcg_temp_free_i32(fp0
);
7937 /* XXX: For now we support only a single FPU context. */
7938 gen_helper_1e0i(cfc1
, t0
, rt
);
7940 /* COP2: Not implemented. */
7947 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7948 gen_store_gpr(t0
, rd
);
7954 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7955 generate_exception_end(ctx
, EXCP_RI
);
7958 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7959 int u
, int sel
, int h
)
7961 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7962 TCGv t0
= tcg_temp_local_new();
7964 gen_load_gpr(t0
, rt
);
7965 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7966 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7967 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7969 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7970 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7977 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7980 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7990 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7993 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7996 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7999 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8002 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8005 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8008 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8011 gen_mtc0(ctx
, t0
, rd
, sel
);
8018 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8021 gen_mtc0(ctx
, t0
, rd
, sel
);
8027 gen_helper_mttc0_status(cpu_env
, t0
);
8030 gen_mtc0(ctx
, t0
, rd
, sel
);
8036 gen_helper_mttc0_cause(cpu_env
, t0
);
8046 gen_helper_mttc0_ebase(cpu_env
, t0
);
8056 gen_helper_mttc0_debug(cpu_env
, t0
);
8059 gen_mtc0(ctx
, t0
, rd
, sel
);
8064 gen_mtc0(ctx
, t0
, rd
, sel
);
8066 } else switch (sel
) {
8067 /* GPR registers. */
8069 gen_helper_0e1i(mttgpr
, t0
, rd
);
8071 /* Auxiliary CPU registers */
8075 gen_helper_0e1i(mttlo
, t0
, 0);
8078 gen_helper_0e1i(mtthi
, t0
, 0);
8081 gen_helper_0e1i(mttacx
, t0
, 0);
8084 gen_helper_0e1i(mttlo
, t0
, 1);
8087 gen_helper_0e1i(mtthi
, t0
, 1);
8090 gen_helper_0e1i(mttacx
, t0
, 1);
8093 gen_helper_0e1i(mttlo
, t0
, 2);
8096 gen_helper_0e1i(mtthi
, t0
, 2);
8099 gen_helper_0e1i(mttacx
, t0
, 2);
8102 gen_helper_0e1i(mttlo
, t0
, 3);
8105 gen_helper_0e1i(mtthi
, t0
, 3);
8108 gen_helper_0e1i(mttacx
, t0
, 3);
8111 gen_helper_mttdsp(cpu_env
, t0
);
8117 /* Floating point (COP1). */
8119 /* XXX: For now we support only a single FPU context. */
8121 TCGv_i32 fp0
= tcg_temp_new_i32();
8123 tcg_gen_trunc_tl_i32(fp0
, t0
);
8124 gen_store_fpr32(ctx
, fp0
, rd
);
8125 tcg_temp_free_i32(fp0
);
8127 TCGv_i32 fp0
= tcg_temp_new_i32();
8129 tcg_gen_trunc_tl_i32(fp0
, t0
);
8130 gen_store_fpr32h(ctx
, fp0
, rd
);
8131 tcg_temp_free_i32(fp0
);
8135 /* XXX: For now we support only a single FPU context. */
8137 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8139 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8140 tcg_temp_free_i32(fs_tmp
);
8142 /* Stop translation as we may have changed hflags */
8143 ctx
->base
.is_jmp
= DISAS_STOP
;
8145 /* COP2: Not implemented. */
8152 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8158 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8159 generate_exception_end(ctx
, EXCP_RI
);
8162 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8164 const char *opn
= "ldst";
8166 check_cp0_enabled(ctx
);
8173 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8178 TCGv t0
= tcg_temp_new();
8180 gen_load_gpr(t0
, rt
);
8181 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8186 #if defined(TARGET_MIPS64)
8188 check_insn(ctx
, ISA_MIPS3
);
8193 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8197 check_insn(ctx
, ISA_MIPS3
);
8199 TCGv t0
= tcg_temp_new();
8201 gen_load_gpr(t0
, rt
);
8202 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8214 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8220 TCGv t0
= tcg_temp_new();
8221 gen_load_gpr(t0
, rt
);
8222 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8228 check_insn(ctx
, ASE_MT
);
8233 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8234 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8238 check_insn(ctx
, ASE_MT
);
8239 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8240 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8245 if (!env
->tlb
->helper_tlbwi
)
8247 gen_helper_tlbwi(cpu_env
);
8252 if (!env
->tlb
->helper_tlbinv
) {
8255 gen_helper_tlbinv(cpu_env
);
8256 } /* treat as nop if TLBINV not supported */
8261 if (!env
->tlb
->helper_tlbinvf
) {
8264 gen_helper_tlbinvf(cpu_env
);
8265 } /* treat as nop if TLBINV not supported */
8269 if (!env
->tlb
->helper_tlbwr
)
8271 gen_helper_tlbwr(cpu_env
);
8275 if (!env
->tlb
->helper_tlbp
)
8277 gen_helper_tlbp(cpu_env
);
8281 if (!env
->tlb
->helper_tlbr
)
8283 gen_helper_tlbr(cpu_env
);
8285 case OPC_ERET
: /* OPC_ERETNC */
8286 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8287 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8290 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8291 if (ctx
->opcode
& (1 << bit_shift
)) {
8294 check_insn(ctx
, ISA_MIPS32R5
);
8295 gen_helper_eretnc(cpu_env
);
8299 check_insn(ctx
, ISA_MIPS2
);
8300 gen_helper_eret(cpu_env
);
8302 ctx
->base
.is_jmp
= DISAS_EXIT
;
8307 check_insn(ctx
, ISA_MIPS32
);
8308 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8309 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8312 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8314 generate_exception_end(ctx
, EXCP_RI
);
8316 gen_helper_deret(cpu_env
);
8317 ctx
->base
.is_jmp
= DISAS_EXIT
;
8322 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8323 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8324 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8327 /* If we get an exception, we want to restart at next instruction */
8328 ctx
->base
.pc_next
+= 4;
8329 save_cpu_state(ctx
, 1);
8330 ctx
->base
.pc_next
-= 4;
8331 gen_helper_wait(cpu_env
);
8332 ctx
->base
.is_jmp
= DISAS_NORETURN
;
8337 generate_exception_end(ctx
, EXCP_RI
);
8340 (void)opn
; /* avoid a compiler warning */
8342 #endif /* !CONFIG_USER_ONLY */
8344 /* CP1 Branches (before delay slot) */
8345 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8346 int32_t cc
, int32_t offset
)
8348 target_ulong btarget
;
8349 TCGv_i32 t0
= tcg_temp_new_i32();
8351 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8352 generate_exception_end(ctx
, EXCP_RI
);
8357 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8359 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
8363 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8364 tcg_gen_not_i32(t0
, t0
);
8365 tcg_gen_andi_i32(t0
, t0
, 1);
8366 tcg_gen_extu_i32_tl(bcond
, t0
);
8369 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8370 tcg_gen_not_i32(t0
, t0
);
8371 tcg_gen_andi_i32(t0
, t0
, 1);
8372 tcg_gen_extu_i32_tl(bcond
, t0
);
8375 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8376 tcg_gen_andi_i32(t0
, t0
, 1);
8377 tcg_gen_extu_i32_tl(bcond
, t0
);
8380 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8381 tcg_gen_andi_i32(t0
, t0
, 1);
8382 tcg_gen_extu_i32_tl(bcond
, t0
);
8384 ctx
->hflags
|= MIPS_HFLAG_BL
;
8388 TCGv_i32 t1
= tcg_temp_new_i32();
8389 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8390 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8391 tcg_gen_nand_i32(t0
, t0
, t1
);
8392 tcg_temp_free_i32(t1
);
8393 tcg_gen_andi_i32(t0
, t0
, 1);
8394 tcg_gen_extu_i32_tl(bcond
, t0
);
8399 TCGv_i32 t1
= tcg_temp_new_i32();
8400 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8401 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8402 tcg_gen_or_i32(t0
, t0
, t1
);
8403 tcg_temp_free_i32(t1
);
8404 tcg_gen_andi_i32(t0
, t0
, 1);
8405 tcg_gen_extu_i32_tl(bcond
, t0
);
8410 TCGv_i32 t1
= tcg_temp_new_i32();
8411 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8412 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8413 tcg_gen_and_i32(t0
, t0
, t1
);
8414 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8415 tcg_gen_and_i32(t0
, t0
, t1
);
8416 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8417 tcg_gen_nand_i32(t0
, t0
, t1
);
8418 tcg_temp_free_i32(t1
);
8419 tcg_gen_andi_i32(t0
, t0
, 1);
8420 tcg_gen_extu_i32_tl(bcond
, t0
);
8425 TCGv_i32 t1
= tcg_temp_new_i32();
8426 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8427 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8428 tcg_gen_or_i32(t0
, t0
, t1
);
8429 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8430 tcg_gen_or_i32(t0
, t0
, t1
);
8431 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8432 tcg_gen_or_i32(t0
, t0
, t1
);
8433 tcg_temp_free_i32(t1
);
8434 tcg_gen_andi_i32(t0
, t0
, 1);
8435 tcg_gen_extu_i32_tl(bcond
, t0
);
8438 ctx
->hflags
|= MIPS_HFLAG_BC
;
8441 MIPS_INVAL("cp1 cond branch");
8442 generate_exception_end(ctx
, EXCP_RI
);
8445 ctx
->btarget
= btarget
;
8446 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8448 tcg_temp_free_i32(t0
);
8451 /* R6 CP1 Branches */
8452 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8453 int32_t ft
, int32_t offset
,
8456 target_ulong btarget
;
8457 TCGv_i64 t0
= tcg_temp_new_i64();
8459 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8460 #ifdef MIPS_DEBUG_DISAS
8461 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8462 "\n", ctx
->base
.pc_next
);
8464 generate_exception_end(ctx
, EXCP_RI
);
8468 gen_load_fpr64(ctx
, t0
, ft
);
8469 tcg_gen_andi_i64(t0
, t0
, 1);
8471 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
8475 tcg_gen_xori_i64(t0
, t0
, 1);
8476 ctx
->hflags
|= MIPS_HFLAG_BC
;
8479 /* t0 already set */
8480 ctx
->hflags
|= MIPS_HFLAG_BC
;
8483 MIPS_INVAL("cp1 cond branch");
8484 generate_exception_end(ctx
, EXCP_RI
);
8488 tcg_gen_trunc_i64_tl(bcond
, t0
);
8490 ctx
->btarget
= btarget
;
8492 switch (delayslot_size
) {
8494 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8497 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8502 tcg_temp_free_i64(t0
);
8505 /* Coprocessor 1 (FPU) */
8507 #define FOP(func, fmt) (((fmt) << 21) | (func))
8510 OPC_ADD_S
= FOP(0, FMT_S
),
8511 OPC_SUB_S
= FOP(1, FMT_S
),
8512 OPC_MUL_S
= FOP(2, FMT_S
),
8513 OPC_DIV_S
= FOP(3, FMT_S
),
8514 OPC_SQRT_S
= FOP(4, FMT_S
),
8515 OPC_ABS_S
= FOP(5, FMT_S
),
8516 OPC_MOV_S
= FOP(6, FMT_S
),
8517 OPC_NEG_S
= FOP(7, FMT_S
),
8518 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8519 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8520 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8521 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8522 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8523 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8524 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8525 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8526 OPC_SEL_S
= FOP(16, FMT_S
),
8527 OPC_MOVCF_S
= FOP(17, FMT_S
),
8528 OPC_MOVZ_S
= FOP(18, FMT_S
),
8529 OPC_MOVN_S
= FOP(19, FMT_S
),
8530 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8531 OPC_RECIP_S
= FOP(21, FMT_S
),
8532 OPC_RSQRT_S
= FOP(22, FMT_S
),
8533 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8534 OPC_MADDF_S
= FOP(24, FMT_S
),
8535 OPC_MSUBF_S
= FOP(25, FMT_S
),
8536 OPC_RINT_S
= FOP(26, FMT_S
),
8537 OPC_CLASS_S
= FOP(27, FMT_S
),
8538 OPC_MIN_S
= FOP(28, FMT_S
),
8539 OPC_RECIP2_S
= FOP(28, FMT_S
),
8540 OPC_MINA_S
= FOP(29, FMT_S
),
8541 OPC_RECIP1_S
= FOP(29, FMT_S
),
8542 OPC_MAX_S
= FOP(30, FMT_S
),
8543 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8544 OPC_MAXA_S
= FOP(31, FMT_S
),
8545 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8546 OPC_CVT_D_S
= FOP(33, FMT_S
),
8547 OPC_CVT_W_S
= FOP(36, FMT_S
),
8548 OPC_CVT_L_S
= FOP(37, FMT_S
),
8549 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8550 OPC_CMP_F_S
= FOP (48, FMT_S
),
8551 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8552 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8553 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8554 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8555 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8556 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8557 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8558 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8559 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8560 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8561 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8562 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8563 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8564 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8565 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8567 OPC_ADD_D
= FOP(0, FMT_D
),
8568 OPC_SUB_D
= FOP(1, FMT_D
),
8569 OPC_MUL_D
= FOP(2, FMT_D
),
8570 OPC_DIV_D
= FOP(3, FMT_D
),
8571 OPC_SQRT_D
= FOP(4, FMT_D
),
8572 OPC_ABS_D
= FOP(5, FMT_D
),
8573 OPC_MOV_D
= FOP(6, FMT_D
),
8574 OPC_NEG_D
= FOP(7, FMT_D
),
8575 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8576 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8577 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8578 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8579 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8580 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8581 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8582 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8583 OPC_SEL_D
= FOP(16, FMT_D
),
8584 OPC_MOVCF_D
= FOP(17, FMT_D
),
8585 OPC_MOVZ_D
= FOP(18, FMT_D
),
8586 OPC_MOVN_D
= FOP(19, FMT_D
),
8587 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8588 OPC_RECIP_D
= FOP(21, FMT_D
),
8589 OPC_RSQRT_D
= FOP(22, FMT_D
),
8590 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8591 OPC_MADDF_D
= FOP(24, FMT_D
),
8592 OPC_MSUBF_D
= FOP(25, FMT_D
),
8593 OPC_RINT_D
= FOP(26, FMT_D
),
8594 OPC_CLASS_D
= FOP(27, FMT_D
),
8595 OPC_MIN_D
= FOP(28, FMT_D
),
8596 OPC_RECIP2_D
= FOP(28, FMT_D
),
8597 OPC_MINA_D
= FOP(29, FMT_D
),
8598 OPC_RECIP1_D
= FOP(29, FMT_D
),
8599 OPC_MAX_D
= FOP(30, FMT_D
),
8600 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8601 OPC_MAXA_D
= FOP(31, FMT_D
),
8602 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8603 OPC_CVT_S_D
= FOP(32, FMT_D
),
8604 OPC_CVT_W_D
= FOP(36, FMT_D
),
8605 OPC_CVT_L_D
= FOP(37, FMT_D
),
8606 OPC_CMP_F_D
= FOP (48, FMT_D
),
8607 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8608 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8609 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8610 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8611 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8612 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8613 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8614 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8615 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8616 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8617 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8618 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8619 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8620 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8621 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8623 OPC_CVT_S_W
= FOP(32, FMT_W
),
8624 OPC_CVT_D_W
= FOP(33, FMT_W
),
8625 OPC_CVT_S_L
= FOP(32, FMT_L
),
8626 OPC_CVT_D_L
= FOP(33, FMT_L
),
8627 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8629 OPC_ADD_PS
= FOP(0, FMT_PS
),
8630 OPC_SUB_PS
= FOP(1, FMT_PS
),
8631 OPC_MUL_PS
= FOP(2, FMT_PS
),
8632 OPC_DIV_PS
= FOP(3, FMT_PS
),
8633 OPC_ABS_PS
= FOP(5, FMT_PS
),
8634 OPC_MOV_PS
= FOP(6, FMT_PS
),
8635 OPC_NEG_PS
= FOP(7, FMT_PS
),
8636 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8637 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8638 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8639 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8640 OPC_MULR_PS
= FOP(26, FMT_PS
),
8641 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8642 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8643 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8644 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8646 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8647 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8648 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8649 OPC_PLL_PS
= FOP(44, FMT_PS
),
8650 OPC_PLU_PS
= FOP(45, FMT_PS
),
8651 OPC_PUL_PS
= FOP(46, FMT_PS
),
8652 OPC_PUU_PS
= FOP(47, FMT_PS
),
8653 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8654 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8655 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8656 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8657 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8658 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8659 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8660 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8661 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8662 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8663 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8664 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8665 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8666 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8667 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8668 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8672 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8673 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8674 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8675 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8676 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8677 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8678 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8679 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8680 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8681 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8682 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8683 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8684 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8685 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8686 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8687 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8688 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8689 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8690 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8691 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8692 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8693 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8695 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8696 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8697 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8698 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8699 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8700 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8701 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8702 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8703 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8704 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8705 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8706 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8707 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8708 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8709 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8710 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8711 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8712 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8713 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8714 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8715 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8716 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8718 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8720 TCGv t0
= tcg_temp_new();
8725 TCGv_i32 fp0
= tcg_temp_new_i32();
8727 gen_load_fpr32(ctx
, fp0
, fs
);
8728 tcg_gen_ext_i32_tl(t0
, fp0
);
8729 tcg_temp_free_i32(fp0
);
8731 gen_store_gpr(t0
, rt
);
8734 gen_load_gpr(t0
, rt
);
8736 TCGv_i32 fp0
= tcg_temp_new_i32();
8738 tcg_gen_trunc_tl_i32(fp0
, t0
);
8739 gen_store_fpr32(ctx
, fp0
, fs
);
8740 tcg_temp_free_i32(fp0
);
8744 gen_helper_1e0i(cfc1
, t0
, fs
);
8745 gen_store_gpr(t0
, rt
);
8748 gen_load_gpr(t0
, rt
);
8749 save_cpu_state(ctx
, 0);
8751 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8753 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8754 tcg_temp_free_i32(fs_tmp
);
8756 /* Stop translation as we may have changed hflags */
8757 ctx
->base
.is_jmp
= DISAS_STOP
;
8759 #if defined(TARGET_MIPS64)
8761 gen_load_fpr64(ctx
, t0
, fs
);
8762 gen_store_gpr(t0
, rt
);
8765 gen_load_gpr(t0
, rt
);
8766 gen_store_fpr64(ctx
, t0
, fs
);
8771 TCGv_i32 fp0
= tcg_temp_new_i32();
8773 gen_load_fpr32h(ctx
, fp0
, fs
);
8774 tcg_gen_ext_i32_tl(t0
, fp0
);
8775 tcg_temp_free_i32(fp0
);
8777 gen_store_gpr(t0
, rt
);
8780 gen_load_gpr(t0
, rt
);
8782 TCGv_i32 fp0
= tcg_temp_new_i32();
8784 tcg_gen_trunc_tl_i32(fp0
, t0
);
8785 gen_store_fpr32h(ctx
, fp0
, fs
);
8786 tcg_temp_free_i32(fp0
);
8790 MIPS_INVAL("cp1 move");
8791 generate_exception_end(ctx
, EXCP_RI
);
8799 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8815 l1
= gen_new_label();
8816 t0
= tcg_temp_new_i32();
8817 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8818 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8819 tcg_temp_free_i32(t0
);
8821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8823 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8828 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8832 TCGv_i32 t0
= tcg_temp_new_i32();
8833 TCGLabel
*l1
= gen_new_label();
8840 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8841 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8842 gen_load_fpr32(ctx
, t0
, fs
);
8843 gen_store_fpr32(ctx
, t0
, fd
);
8845 tcg_temp_free_i32(t0
);
8848 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8851 TCGv_i32 t0
= tcg_temp_new_i32();
8853 TCGLabel
*l1
= gen_new_label();
8860 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8861 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8862 tcg_temp_free_i32(t0
);
8863 fp0
= tcg_temp_new_i64();
8864 gen_load_fpr64(ctx
, fp0
, fs
);
8865 gen_store_fpr64(ctx
, fp0
, fd
);
8866 tcg_temp_free_i64(fp0
);
8870 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8874 TCGv_i32 t0
= tcg_temp_new_i32();
8875 TCGLabel
*l1
= gen_new_label();
8876 TCGLabel
*l2
= gen_new_label();
8883 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8884 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8885 gen_load_fpr32(ctx
, t0
, fs
);
8886 gen_store_fpr32(ctx
, t0
, fd
);
8889 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8890 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8891 gen_load_fpr32h(ctx
, t0
, fs
);
8892 gen_store_fpr32h(ctx
, t0
, fd
);
8893 tcg_temp_free_i32(t0
);
8897 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8900 TCGv_i32 t1
= tcg_const_i32(0);
8901 TCGv_i32 fp0
= tcg_temp_new_i32();
8902 TCGv_i32 fp1
= tcg_temp_new_i32();
8903 TCGv_i32 fp2
= tcg_temp_new_i32();
8904 gen_load_fpr32(ctx
, fp0
, fd
);
8905 gen_load_fpr32(ctx
, fp1
, ft
);
8906 gen_load_fpr32(ctx
, fp2
, fs
);
8910 tcg_gen_andi_i32(fp0
, fp0
, 1);
8911 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8914 tcg_gen_andi_i32(fp1
, fp1
, 1);
8915 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8918 tcg_gen_andi_i32(fp1
, fp1
, 1);
8919 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8922 MIPS_INVAL("gen_sel_s");
8923 generate_exception_end(ctx
, EXCP_RI
);
8927 gen_store_fpr32(ctx
, fp0
, fd
);
8928 tcg_temp_free_i32(fp2
);
8929 tcg_temp_free_i32(fp1
);
8930 tcg_temp_free_i32(fp0
);
8931 tcg_temp_free_i32(t1
);
8934 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8937 TCGv_i64 t1
= tcg_const_i64(0);
8938 TCGv_i64 fp0
= tcg_temp_new_i64();
8939 TCGv_i64 fp1
= tcg_temp_new_i64();
8940 TCGv_i64 fp2
= tcg_temp_new_i64();
8941 gen_load_fpr64(ctx
, fp0
, fd
);
8942 gen_load_fpr64(ctx
, fp1
, ft
);
8943 gen_load_fpr64(ctx
, fp2
, fs
);
8947 tcg_gen_andi_i64(fp0
, fp0
, 1);
8948 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8951 tcg_gen_andi_i64(fp1
, fp1
, 1);
8952 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8955 tcg_gen_andi_i64(fp1
, fp1
, 1);
8956 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8959 MIPS_INVAL("gen_sel_d");
8960 generate_exception_end(ctx
, EXCP_RI
);
8964 gen_store_fpr64(ctx
, fp0
, fd
);
8965 tcg_temp_free_i64(fp2
);
8966 tcg_temp_free_i64(fp1
);
8967 tcg_temp_free_i64(fp0
);
8968 tcg_temp_free_i64(t1
);
8971 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8972 int ft
, int fs
, int fd
, int cc
)
8974 uint32_t func
= ctx
->opcode
& 0x3f;
8978 TCGv_i32 fp0
= tcg_temp_new_i32();
8979 TCGv_i32 fp1
= tcg_temp_new_i32();
8981 gen_load_fpr32(ctx
, fp0
, fs
);
8982 gen_load_fpr32(ctx
, fp1
, ft
);
8983 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8984 tcg_temp_free_i32(fp1
);
8985 gen_store_fpr32(ctx
, fp0
, fd
);
8986 tcg_temp_free_i32(fp0
);
8991 TCGv_i32 fp0
= tcg_temp_new_i32();
8992 TCGv_i32 fp1
= tcg_temp_new_i32();
8994 gen_load_fpr32(ctx
, fp0
, fs
);
8995 gen_load_fpr32(ctx
, fp1
, ft
);
8996 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8997 tcg_temp_free_i32(fp1
);
8998 gen_store_fpr32(ctx
, fp0
, fd
);
8999 tcg_temp_free_i32(fp0
);
9004 TCGv_i32 fp0
= tcg_temp_new_i32();
9005 TCGv_i32 fp1
= tcg_temp_new_i32();
9007 gen_load_fpr32(ctx
, fp0
, fs
);
9008 gen_load_fpr32(ctx
, fp1
, ft
);
9009 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9010 tcg_temp_free_i32(fp1
);
9011 gen_store_fpr32(ctx
, fp0
, fd
);
9012 tcg_temp_free_i32(fp0
);
9017 TCGv_i32 fp0
= tcg_temp_new_i32();
9018 TCGv_i32 fp1
= tcg_temp_new_i32();
9020 gen_load_fpr32(ctx
, fp0
, fs
);
9021 gen_load_fpr32(ctx
, fp1
, ft
);
9022 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9023 tcg_temp_free_i32(fp1
);
9024 gen_store_fpr32(ctx
, fp0
, fd
);
9025 tcg_temp_free_i32(fp0
);
9030 TCGv_i32 fp0
= tcg_temp_new_i32();
9032 gen_load_fpr32(ctx
, fp0
, fs
);
9033 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9034 gen_store_fpr32(ctx
, fp0
, fd
);
9035 tcg_temp_free_i32(fp0
);
9040 TCGv_i32 fp0
= tcg_temp_new_i32();
9042 gen_load_fpr32(ctx
, fp0
, fs
);
9044 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9046 gen_helper_float_abs_s(fp0
, fp0
);
9048 gen_store_fpr32(ctx
, fp0
, fd
);
9049 tcg_temp_free_i32(fp0
);
9054 TCGv_i32 fp0
= tcg_temp_new_i32();
9056 gen_load_fpr32(ctx
, fp0
, fs
);
9057 gen_store_fpr32(ctx
, fp0
, fd
);
9058 tcg_temp_free_i32(fp0
);
9063 TCGv_i32 fp0
= tcg_temp_new_i32();
9065 gen_load_fpr32(ctx
, fp0
, fs
);
9067 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9069 gen_helper_float_chs_s(fp0
, fp0
);
9071 gen_store_fpr32(ctx
, fp0
, fd
);
9072 tcg_temp_free_i32(fp0
);
9076 check_cp1_64bitmode(ctx
);
9078 TCGv_i32 fp32
= tcg_temp_new_i32();
9079 TCGv_i64 fp64
= tcg_temp_new_i64();
9081 gen_load_fpr32(ctx
, fp32
, fs
);
9083 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9085 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9087 tcg_temp_free_i32(fp32
);
9088 gen_store_fpr64(ctx
, fp64
, fd
);
9089 tcg_temp_free_i64(fp64
);
9093 check_cp1_64bitmode(ctx
);
9095 TCGv_i32 fp32
= tcg_temp_new_i32();
9096 TCGv_i64 fp64
= tcg_temp_new_i64();
9098 gen_load_fpr32(ctx
, fp32
, fs
);
9100 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9102 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9104 tcg_temp_free_i32(fp32
);
9105 gen_store_fpr64(ctx
, fp64
, fd
);
9106 tcg_temp_free_i64(fp64
);
9110 check_cp1_64bitmode(ctx
);
9112 TCGv_i32 fp32
= tcg_temp_new_i32();
9113 TCGv_i64 fp64
= tcg_temp_new_i64();
9115 gen_load_fpr32(ctx
, fp32
, fs
);
9117 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9119 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9121 tcg_temp_free_i32(fp32
);
9122 gen_store_fpr64(ctx
, fp64
, fd
);
9123 tcg_temp_free_i64(fp64
);
9127 check_cp1_64bitmode(ctx
);
9129 TCGv_i32 fp32
= tcg_temp_new_i32();
9130 TCGv_i64 fp64
= tcg_temp_new_i64();
9132 gen_load_fpr32(ctx
, fp32
, fs
);
9134 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9136 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9138 tcg_temp_free_i32(fp32
);
9139 gen_store_fpr64(ctx
, fp64
, fd
);
9140 tcg_temp_free_i64(fp64
);
9145 TCGv_i32 fp0
= tcg_temp_new_i32();
9147 gen_load_fpr32(ctx
, fp0
, fs
);
9149 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9151 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9153 gen_store_fpr32(ctx
, fp0
, fd
);
9154 tcg_temp_free_i32(fp0
);
9159 TCGv_i32 fp0
= tcg_temp_new_i32();
9161 gen_load_fpr32(ctx
, fp0
, fs
);
9163 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9165 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9167 gen_store_fpr32(ctx
, fp0
, fd
);
9168 tcg_temp_free_i32(fp0
);
9173 TCGv_i32 fp0
= tcg_temp_new_i32();
9175 gen_load_fpr32(ctx
, fp0
, fs
);
9177 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9179 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9181 gen_store_fpr32(ctx
, fp0
, fd
);
9182 tcg_temp_free_i32(fp0
);
9187 TCGv_i32 fp0
= tcg_temp_new_i32();
9189 gen_load_fpr32(ctx
, fp0
, fs
);
9191 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9193 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9195 gen_store_fpr32(ctx
, fp0
, fd
);
9196 tcg_temp_free_i32(fp0
);
9200 check_insn(ctx
, ISA_MIPS32R6
);
9201 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9204 check_insn(ctx
, ISA_MIPS32R6
);
9205 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9208 check_insn(ctx
, ISA_MIPS32R6
);
9209 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9213 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9216 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9218 TCGLabel
*l1
= gen_new_label();
9222 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9224 fp0
= tcg_temp_new_i32();
9225 gen_load_fpr32(ctx
, fp0
, fs
);
9226 gen_store_fpr32(ctx
, fp0
, fd
);
9227 tcg_temp_free_i32(fp0
);
9232 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9234 TCGLabel
*l1
= gen_new_label();
9238 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9239 fp0
= tcg_temp_new_i32();
9240 gen_load_fpr32(ctx
, fp0
, fs
);
9241 gen_store_fpr32(ctx
, fp0
, fd
);
9242 tcg_temp_free_i32(fp0
);
9249 TCGv_i32 fp0
= tcg_temp_new_i32();
9251 gen_load_fpr32(ctx
, fp0
, fs
);
9252 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9253 gen_store_fpr32(ctx
, fp0
, fd
);
9254 tcg_temp_free_i32(fp0
);
9259 TCGv_i32 fp0
= tcg_temp_new_i32();
9261 gen_load_fpr32(ctx
, fp0
, fs
);
9262 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9263 gen_store_fpr32(ctx
, fp0
, fd
);
9264 tcg_temp_free_i32(fp0
);
9268 check_insn(ctx
, ISA_MIPS32R6
);
9270 TCGv_i32 fp0
= tcg_temp_new_i32();
9271 TCGv_i32 fp1
= tcg_temp_new_i32();
9272 TCGv_i32 fp2
= tcg_temp_new_i32();
9273 gen_load_fpr32(ctx
, fp0
, fs
);
9274 gen_load_fpr32(ctx
, fp1
, ft
);
9275 gen_load_fpr32(ctx
, fp2
, fd
);
9276 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9277 gen_store_fpr32(ctx
, fp2
, fd
);
9278 tcg_temp_free_i32(fp2
);
9279 tcg_temp_free_i32(fp1
);
9280 tcg_temp_free_i32(fp0
);
9284 check_insn(ctx
, ISA_MIPS32R6
);
9286 TCGv_i32 fp0
= tcg_temp_new_i32();
9287 TCGv_i32 fp1
= tcg_temp_new_i32();
9288 TCGv_i32 fp2
= tcg_temp_new_i32();
9289 gen_load_fpr32(ctx
, fp0
, fs
);
9290 gen_load_fpr32(ctx
, fp1
, ft
);
9291 gen_load_fpr32(ctx
, fp2
, fd
);
9292 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9293 gen_store_fpr32(ctx
, fp2
, fd
);
9294 tcg_temp_free_i32(fp2
);
9295 tcg_temp_free_i32(fp1
);
9296 tcg_temp_free_i32(fp0
);
9300 check_insn(ctx
, ISA_MIPS32R6
);
9302 TCGv_i32 fp0
= tcg_temp_new_i32();
9303 gen_load_fpr32(ctx
, fp0
, fs
);
9304 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9305 gen_store_fpr32(ctx
, fp0
, fd
);
9306 tcg_temp_free_i32(fp0
);
9310 check_insn(ctx
, ISA_MIPS32R6
);
9312 TCGv_i32 fp0
= tcg_temp_new_i32();
9313 gen_load_fpr32(ctx
, fp0
, fs
);
9314 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9315 gen_store_fpr32(ctx
, fp0
, fd
);
9316 tcg_temp_free_i32(fp0
);
9319 case OPC_MIN_S
: /* OPC_RECIP2_S */
9320 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9322 TCGv_i32 fp0
= tcg_temp_new_i32();
9323 TCGv_i32 fp1
= tcg_temp_new_i32();
9324 TCGv_i32 fp2
= tcg_temp_new_i32();
9325 gen_load_fpr32(ctx
, fp0
, fs
);
9326 gen_load_fpr32(ctx
, fp1
, ft
);
9327 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9328 gen_store_fpr32(ctx
, fp2
, fd
);
9329 tcg_temp_free_i32(fp2
);
9330 tcg_temp_free_i32(fp1
);
9331 tcg_temp_free_i32(fp0
);
9334 check_cp1_64bitmode(ctx
);
9336 TCGv_i32 fp0
= tcg_temp_new_i32();
9337 TCGv_i32 fp1
= tcg_temp_new_i32();
9339 gen_load_fpr32(ctx
, fp0
, fs
);
9340 gen_load_fpr32(ctx
, fp1
, ft
);
9341 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9342 tcg_temp_free_i32(fp1
);
9343 gen_store_fpr32(ctx
, fp0
, fd
);
9344 tcg_temp_free_i32(fp0
);
9348 case OPC_MINA_S
: /* OPC_RECIP1_S */
9349 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9351 TCGv_i32 fp0
= tcg_temp_new_i32();
9352 TCGv_i32 fp1
= tcg_temp_new_i32();
9353 TCGv_i32 fp2
= tcg_temp_new_i32();
9354 gen_load_fpr32(ctx
, fp0
, fs
);
9355 gen_load_fpr32(ctx
, fp1
, ft
);
9356 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9357 gen_store_fpr32(ctx
, fp2
, fd
);
9358 tcg_temp_free_i32(fp2
);
9359 tcg_temp_free_i32(fp1
);
9360 tcg_temp_free_i32(fp0
);
9363 check_cp1_64bitmode(ctx
);
9365 TCGv_i32 fp0
= tcg_temp_new_i32();
9367 gen_load_fpr32(ctx
, fp0
, fs
);
9368 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9369 gen_store_fpr32(ctx
, fp0
, fd
);
9370 tcg_temp_free_i32(fp0
);
9374 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9375 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9377 TCGv_i32 fp0
= tcg_temp_new_i32();
9378 TCGv_i32 fp1
= tcg_temp_new_i32();
9379 gen_load_fpr32(ctx
, fp0
, fs
);
9380 gen_load_fpr32(ctx
, fp1
, ft
);
9381 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9382 gen_store_fpr32(ctx
, fp1
, fd
);
9383 tcg_temp_free_i32(fp1
);
9384 tcg_temp_free_i32(fp0
);
9387 check_cp1_64bitmode(ctx
);
9389 TCGv_i32 fp0
= tcg_temp_new_i32();
9391 gen_load_fpr32(ctx
, fp0
, fs
);
9392 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9393 gen_store_fpr32(ctx
, fp0
, fd
);
9394 tcg_temp_free_i32(fp0
);
9398 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9399 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9401 TCGv_i32 fp0
= tcg_temp_new_i32();
9402 TCGv_i32 fp1
= tcg_temp_new_i32();
9403 gen_load_fpr32(ctx
, fp0
, fs
);
9404 gen_load_fpr32(ctx
, fp1
, ft
);
9405 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9406 gen_store_fpr32(ctx
, fp1
, fd
);
9407 tcg_temp_free_i32(fp1
);
9408 tcg_temp_free_i32(fp0
);
9411 check_cp1_64bitmode(ctx
);
9413 TCGv_i32 fp0
= tcg_temp_new_i32();
9414 TCGv_i32 fp1
= tcg_temp_new_i32();
9416 gen_load_fpr32(ctx
, fp0
, fs
);
9417 gen_load_fpr32(ctx
, fp1
, ft
);
9418 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9419 tcg_temp_free_i32(fp1
);
9420 gen_store_fpr32(ctx
, fp0
, fd
);
9421 tcg_temp_free_i32(fp0
);
9426 check_cp1_registers(ctx
, fd
);
9428 TCGv_i32 fp32
= tcg_temp_new_i32();
9429 TCGv_i64 fp64
= tcg_temp_new_i64();
9431 gen_load_fpr32(ctx
, fp32
, fs
);
9432 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9433 tcg_temp_free_i32(fp32
);
9434 gen_store_fpr64(ctx
, fp64
, fd
);
9435 tcg_temp_free_i64(fp64
);
9440 TCGv_i32 fp0
= tcg_temp_new_i32();
9442 gen_load_fpr32(ctx
, fp0
, fs
);
9444 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9446 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9448 gen_store_fpr32(ctx
, fp0
, fd
);
9449 tcg_temp_free_i32(fp0
);
9453 check_cp1_64bitmode(ctx
);
9455 TCGv_i32 fp32
= tcg_temp_new_i32();
9456 TCGv_i64 fp64
= tcg_temp_new_i64();
9458 gen_load_fpr32(ctx
, fp32
, fs
);
9460 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9462 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9464 tcg_temp_free_i32(fp32
);
9465 gen_store_fpr64(ctx
, fp64
, fd
);
9466 tcg_temp_free_i64(fp64
);
9472 TCGv_i64 fp64
= tcg_temp_new_i64();
9473 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9474 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9476 gen_load_fpr32(ctx
, fp32_0
, fs
);
9477 gen_load_fpr32(ctx
, fp32_1
, ft
);
9478 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9479 tcg_temp_free_i32(fp32_1
);
9480 tcg_temp_free_i32(fp32_0
);
9481 gen_store_fpr64(ctx
, fp64
, fd
);
9482 tcg_temp_free_i64(fp64
);
9494 case OPC_CMP_NGLE_S
:
9501 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9502 if (ctx
->opcode
& (1 << 6)) {
9503 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9505 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9509 check_cp1_registers(ctx
, fs
| ft
| fd
);
9511 TCGv_i64 fp0
= tcg_temp_new_i64();
9512 TCGv_i64 fp1
= tcg_temp_new_i64();
9514 gen_load_fpr64(ctx
, fp0
, fs
);
9515 gen_load_fpr64(ctx
, fp1
, ft
);
9516 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9517 tcg_temp_free_i64(fp1
);
9518 gen_store_fpr64(ctx
, fp0
, fd
);
9519 tcg_temp_free_i64(fp0
);
9523 check_cp1_registers(ctx
, fs
| ft
| fd
);
9525 TCGv_i64 fp0
= tcg_temp_new_i64();
9526 TCGv_i64 fp1
= tcg_temp_new_i64();
9528 gen_load_fpr64(ctx
, fp0
, fs
);
9529 gen_load_fpr64(ctx
, fp1
, ft
);
9530 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9531 tcg_temp_free_i64(fp1
);
9532 gen_store_fpr64(ctx
, fp0
, fd
);
9533 tcg_temp_free_i64(fp0
);
9537 check_cp1_registers(ctx
, fs
| ft
| fd
);
9539 TCGv_i64 fp0
= tcg_temp_new_i64();
9540 TCGv_i64 fp1
= tcg_temp_new_i64();
9542 gen_load_fpr64(ctx
, fp0
, fs
);
9543 gen_load_fpr64(ctx
, fp1
, ft
);
9544 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9545 tcg_temp_free_i64(fp1
);
9546 gen_store_fpr64(ctx
, fp0
, fd
);
9547 tcg_temp_free_i64(fp0
);
9551 check_cp1_registers(ctx
, fs
| ft
| fd
);
9553 TCGv_i64 fp0
= tcg_temp_new_i64();
9554 TCGv_i64 fp1
= tcg_temp_new_i64();
9556 gen_load_fpr64(ctx
, fp0
, fs
);
9557 gen_load_fpr64(ctx
, fp1
, ft
);
9558 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9559 tcg_temp_free_i64(fp1
);
9560 gen_store_fpr64(ctx
, fp0
, fd
);
9561 tcg_temp_free_i64(fp0
);
9565 check_cp1_registers(ctx
, fs
| fd
);
9567 TCGv_i64 fp0
= tcg_temp_new_i64();
9569 gen_load_fpr64(ctx
, fp0
, fs
);
9570 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9571 gen_store_fpr64(ctx
, fp0
, fd
);
9572 tcg_temp_free_i64(fp0
);
9576 check_cp1_registers(ctx
, fs
| fd
);
9578 TCGv_i64 fp0
= tcg_temp_new_i64();
9580 gen_load_fpr64(ctx
, fp0
, fs
);
9582 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9584 gen_helper_float_abs_d(fp0
, fp0
);
9586 gen_store_fpr64(ctx
, fp0
, fd
);
9587 tcg_temp_free_i64(fp0
);
9591 check_cp1_registers(ctx
, fs
| fd
);
9593 TCGv_i64 fp0
= tcg_temp_new_i64();
9595 gen_load_fpr64(ctx
, fp0
, fs
);
9596 gen_store_fpr64(ctx
, fp0
, fd
);
9597 tcg_temp_free_i64(fp0
);
9601 check_cp1_registers(ctx
, fs
| fd
);
9603 TCGv_i64 fp0
= tcg_temp_new_i64();
9605 gen_load_fpr64(ctx
, fp0
, fs
);
9607 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9609 gen_helper_float_chs_d(fp0
, fp0
);
9611 gen_store_fpr64(ctx
, fp0
, fd
);
9612 tcg_temp_free_i64(fp0
);
9616 check_cp1_64bitmode(ctx
);
9618 TCGv_i64 fp0
= tcg_temp_new_i64();
9620 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9624 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9626 gen_store_fpr64(ctx
, fp0
, fd
);
9627 tcg_temp_free_i64(fp0
);
9631 check_cp1_64bitmode(ctx
);
9633 TCGv_i64 fp0
= tcg_temp_new_i64();
9635 gen_load_fpr64(ctx
, fp0
, fs
);
9637 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9639 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9641 gen_store_fpr64(ctx
, fp0
, fd
);
9642 tcg_temp_free_i64(fp0
);
9646 check_cp1_64bitmode(ctx
);
9648 TCGv_i64 fp0
= tcg_temp_new_i64();
9650 gen_load_fpr64(ctx
, fp0
, fs
);
9652 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9654 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9656 gen_store_fpr64(ctx
, fp0
, fd
);
9657 tcg_temp_free_i64(fp0
);
9661 check_cp1_64bitmode(ctx
);
9663 TCGv_i64 fp0
= tcg_temp_new_i64();
9665 gen_load_fpr64(ctx
, fp0
, fs
);
9667 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9669 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9671 gen_store_fpr64(ctx
, fp0
, fd
);
9672 tcg_temp_free_i64(fp0
);
9676 check_cp1_registers(ctx
, fs
);
9678 TCGv_i32 fp32
= tcg_temp_new_i32();
9679 TCGv_i64 fp64
= tcg_temp_new_i64();
9681 gen_load_fpr64(ctx
, fp64
, fs
);
9683 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9685 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9687 tcg_temp_free_i64(fp64
);
9688 gen_store_fpr32(ctx
, fp32
, fd
);
9689 tcg_temp_free_i32(fp32
);
9693 check_cp1_registers(ctx
, fs
);
9695 TCGv_i32 fp32
= tcg_temp_new_i32();
9696 TCGv_i64 fp64
= tcg_temp_new_i64();
9698 gen_load_fpr64(ctx
, fp64
, fs
);
9700 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9702 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9704 tcg_temp_free_i64(fp64
);
9705 gen_store_fpr32(ctx
, fp32
, fd
);
9706 tcg_temp_free_i32(fp32
);
9710 check_cp1_registers(ctx
, fs
);
9712 TCGv_i32 fp32
= tcg_temp_new_i32();
9713 TCGv_i64 fp64
= tcg_temp_new_i64();
9715 gen_load_fpr64(ctx
, fp64
, fs
);
9717 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9719 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9721 tcg_temp_free_i64(fp64
);
9722 gen_store_fpr32(ctx
, fp32
, fd
);
9723 tcg_temp_free_i32(fp32
);
9727 check_cp1_registers(ctx
, fs
);
9729 TCGv_i32 fp32
= tcg_temp_new_i32();
9730 TCGv_i64 fp64
= tcg_temp_new_i64();
9732 gen_load_fpr64(ctx
, fp64
, fs
);
9734 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9736 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9738 tcg_temp_free_i64(fp64
);
9739 gen_store_fpr32(ctx
, fp32
, fd
);
9740 tcg_temp_free_i32(fp32
);
9744 check_insn(ctx
, ISA_MIPS32R6
);
9745 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9748 check_insn(ctx
, ISA_MIPS32R6
);
9749 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9752 check_insn(ctx
, ISA_MIPS32R6
);
9753 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9756 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9757 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9760 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9762 TCGLabel
*l1
= gen_new_label();
9766 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9768 fp0
= tcg_temp_new_i64();
9769 gen_load_fpr64(ctx
, fp0
, fs
);
9770 gen_store_fpr64(ctx
, fp0
, fd
);
9771 tcg_temp_free_i64(fp0
);
9776 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9778 TCGLabel
*l1
= gen_new_label();
9782 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9783 fp0
= tcg_temp_new_i64();
9784 gen_load_fpr64(ctx
, fp0
, fs
);
9785 gen_store_fpr64(ctx
, fp0
, fd
);
9786 tcg_temp_free_i64(fp0
);
9792 check_cp1_registers(ctx
, fs
| fd
);
9794 TCGv_i64 fp0
= tcg_temp_new_i64();
9796 gen_load_fpr64(ctx
, fp0
, fs
);
9797 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9798 gen_store_fpr64(ctx
, fp0
, fd
);
9799 tcg_temp_free_i64(fp0
);
9803 check_cp1_registers(ctx
, fs
| fd
);
9805 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp0
, fs
);
9808 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9809 gen_store_fpr64(ctx
, fp0
, fd
);
9810 tcg_temp_free_i64(fp0
);
9814 check_insn(ctx
, ISA_MIPS32R6
);
9816 TCGv_i64 fp0
= tcg_temp_new_i64();
9817 TCGv_i64 fp1
= tcg_temp_new_i64();
9818 TCGv_i64 fp2
= tcg_temp_new_i64();
9819 gen_load_fpr64(ctx
, fp0
, fs
);
9820 gen_load_fpr64(ctx
, fp1
, ft
);
9821 gen_load_fpr64(ctx
, fp2
, fd
);
9822 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9823 gen_store_fpr64(ctx
, fp2
, fd
);
9824 tcg_temp_free_i64(fp2
);
9825 tcg_temp_free_i64(fp1
);
9826 tcg_temp_free_i64(fp0
);
9830 check_insn(ctx
, ISA_MIPS32R6
);
9832 TCGv_i64 fp0
= tcg_temp_new_i64();
9833 TCGv_i64 fp1
= tcg_temp_new_i64();
9834 TCGv_i64 fp2
= tcg_temp_new_i64();
9835 gen_load_fpr64(ctx
, fp0
, fs
);
9836 gen_load_fpr64(ctx
, fp1
, ft
);
9837 gen_load_fpr64(ctx
, fp2
, fd
);
9838 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9839 gen_store_fpr64(ctx
, fp2
, fd
);
9840 tcg_temp_free_i64(fp2
);
9841 tcg_temp_free_i64(fp1
);
9842 tcg_temp_free_i64(fp0
);
9846 check_insn(ctx
, ISA_MIPS32R6
);
9848 TCGv_i64 fp0
= tcg_temp_new_i64();
9849 gen_load_fpr64(ctx
, fp0
, fs
);
9850 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9851 gen_store_fpr64(ctx
, fp0
, fd
);
9852 tcg_temp_free_i64(fp0
);
9856 check_insn(ctx
, ISA_MIPS32R6
);
9858 TCGv_i64 fp0
= tcg_temp_new_i64();
9859 gen_load_fpr64(ctx
, fp0
, fs
);
9860 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9861 gen_store_fpr64(ctx
, fp0
, fd
);
9862 tcg_temp_free_i64(fp0
);
9865 case OPC_MIN_D
: /* OPC_RECIP2_D */
9866 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9868 TCGv_i64 fp0
= tcg_temp_new_i64();
9869 TCGv_i64 fp1
= tcg_temp_new_i64();
9870 gen_load_fpr64(ctx
, fp0
, fs
);
9871 gen_load_fpr64(ctx
, fp1
, ft
);
9872 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9873 gen_store_fpr64(ctx
, fp1
, fd
);
9874 tcg_temp_free_i64(fp1
);
9875 tcg_temp_free_i64(fp0
);
9878 check_cp1_64bitmode(ctx
);
9880 TCGv_i64 fp0
= tcg_temp_new_i64();
9881 TCGv_i64 fp1
= tcg_temp_new_i64();
9883 gen_load_fpr64(ctx
, fp0
, fs
);
9884 gen_load_fpr64(ctx
, fp1
, ft
);
9885 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9886 tcg_temp_free_i64(fp1
);
9887 gen_store_fpr64(ctx
, fp0
, fd
);
9888 tcg_temp_free_i64(fp0
);
9892 case OPC_MINA_D
: /* OPC_RECIP1_D */
9893 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9895 TCGv_i64 fp0
= tcg_temp_new_i64();
9896 TCGv_i64 fp1
= tcg_temp_new_i64();
9897 gen_load_fpr64(ctx
, fp0
, fs
);
9898 gen_load_fpr64(ctx
, fp1
, ft
);
9899 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9900 gen_store_fpr64(ctx
, fp1
, fd
);
9901 tcg_temp_free_i64(fp1
);
9902 tcg_temp_free_i64(fp0
);
9905 check_cp1_64bitmode(ctx
);
9907 TCGv_i64 fp0
= tcg_temp_new_i64();
9909 gen_load_fpr64(ctx
, fp0
, fs
);
9910 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9911 gen_store_fpr64(ctx
, fp0
, fd
);
9912 tcg_temp_free_i64(fp0
);
9916 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9917 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9919 TCGv_i64 fp0
= tcg_temp_new_i64();
9920 TCGv_i64 fp1
= tcg_temp_new_i64();
9921 gen_load_fpr64(ctx
, fp0
, fs
);
9922 gen_load_fpr64(ctx
, fp1
, ft
);
9923 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9924 gen_store_fpr64(ctx
, fp1
, fd
);
9925 tcg_temp_free_i64(fp1
);
9926 tcg_temp_free_i64(fp0
);
9929 check_cp1_64bitmode(ctx
);
9931 TCGv_i64 fp0
= tcg_temp_new_i64();
9933 gen_load_fpr64(ctx
, fp0
, fs
);
9934 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9935 gen_store_fpr64(ctx
, fp0
, fd
);
9936 tcg_temp_free_i64(fp0
);
9940 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9941 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9943 TCGv_i64 fp0
= tcg_temp_new_i64();
9944 TCGv_i64 fp1
= tcg_temp_new_i64();
9945 gen_load_fpr64(ctx
, fp0
, fs
);
9946 gen_load_fpr64(ctx
, fp1
, ft
);
9947 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9948 gen_store_fpr64(ctx
, fp1
, fd
);
9949 tcg_temp_free_i64(fp1
);
9950 tcg_temp_free_i64(fp0
);
9953 check_cp1_64bitmode(ctx
);
9955 TCGv_i64 fp0
= tcg_temp_new_i64();
9956 TCGv_i64 fp1
= tcg_temp_new_i64();
9958 gen_load_fpr64(ctx
, fp0
, fs
);
9959 gen_load_fpr64(ctx
, fp1
, ft
);
9960 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9961 tcg_temp_free_i64(fp1
);
9962 gen_store_fpr64(ctx
, fp0
, fd
);
9963 tcg_temp_free_i64(fp0
);
9976 case OPC_CMP_NGLE_D
:
9983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9984 if (ctx
->opcode
& (1 << 6)) {
9985 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9987 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9991 check_cp1_registers(ctx
, fs
);
9993 TCGv_i32 fp32
= tcg_temp_new_i32();
9994 TCGv_i64 fp64
= tcg_temp_new_i64();
9996 gen_load_fpr64(ctx
, fp64
, fs
);
9997 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9998 tcg_temp_free_i64(fp64
);
9999 gen_store_fpr32(ctx
, fp32
, fd
);
10000 tcg_temp_free_i32(fp32
);
10004 check_cp1_registers(ctx
, fs
);
10006 TCGv_i32 fp32
= tcg_temp_new_i32();
10007 TCGv_i64 fp64
= tcg_temp_new_i64();
10009 gen_load_fpr64(ctx
, fp64
, fs
);
10010 if (ctx
->nan2008
) {
10011 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10013 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10015 tcg_temp_free_i64(fp64
);
10016 gen_store_fpr32(ctx
, fp32
, fd
);
10017 tcg_temp_free_i32(fp32
);
10021 check_cp1_64bitmode(ctx
);
10023 TCGv_i64 fp0
= tcg_temp_new_i64();
10025 gen_load_fpr64(ctx
, fp0
, fs
);
10026 if (ctx
->nan2008
) {
10027 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10029 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10031 gen_store_fpr64(ctx
, fp0
, fd
);
10032 tcg_temp_free_i64(fp0
);
10037 TCGv_i32 fp0
= tcg_temp_new_i32();
10039 gen_load_fpr32(ctx
, fp0
, fs
);
10040 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10041 gen_store_fpr32(ctx
, fp0
, fd
);
10042 tcg_temp_free_i32(fp0
);
10046 check_cp1_registers(ctx
, fd
);
10048 TCGv_i32 fp32
= tcg_temp_new_i32();
10049 TCGv_i64 fp64
= tcg_temp_new_i64();
10051 gen_load_fpr32(ctx
, fp32
, fs
);
10052 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10053 tcg_temp_free_i32(fp32
);
10054 gen_store_fpr64(ctx
, fp64
, fd
);
10055 tcg_temp_free_i64(fp64
);
10059 check_cp1_64bitmode(ctx
);
10061 TCGv_i32 fp32
= tcg_temp_new_i32();
10062 TCGv_i64 fp64
= tcg_temp_new_i64();
10064 gen_load_fpr64(ctx
, fp64
, fs
);
10065 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10066 tcg_temp_free_i64(fp64
);
10067 gen_store_fpr32(ctx
, fp32
, fd
);
10068 tcg_temp_free_i32(fp32
);
10072 check_cp1_64bitmode(ctx
);
10074 TCGv_i64 fp0
= tcg_temp_new_i64();
10076 gen_load_fpr64(ctx
, fp0
, fs
);
10077 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10078 gen_store_fpr64(ctx
, fp0
, fd
);
10079 tcg_temp_free_i64(fp0
);
10082 case OPC_CVT_PS_PW
:
10085 TCGv_i64 fp0
= tcg_temp_new_i64();
10087 gen_load_fpr64(ctx
, fp0
, fs
);
10088 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10089 gen_store_fpr64(ctx
, fp0
, fd
);
10090 tcg_temp_free_i64(fp0
);
10096 TCGv_i64 fp0
= tcg_temp_new_i64();
10097 TCGv_i64 fp1
= tcg_temp_new_i64();
10099 gen_load_fpr64(ctx
, fp0
, fs
);
10100 gen_load_fpr64(ctx
, fp1
, ft
);
10101 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10102 tcg_temp_free_i64(fp1
);
10103 gen_store_fpr64(ctx
, fp0
, fd
);
10104 tcg_temp_free_i64(fp0
);
10110 TCGv_i64 fp0
= tcg_temp_new_i64();
10111 TCGv_i64 fp1
= tcg_temp_new_i64();
10113 gen_load_fpr64(ctx
, fp0
, fs
);
10114 gen_load_fpr64(ctx
, fp1
, ft
);
10115 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10116 tcg_temp_free_i64(fp1
);
10117 gen_store_fpr64(ctx
, fp0
, fd
);
10118 tcg_temp_free_i64(fp0
);
10124 TCGv_i64 fp0
= tcg_temp_new_i64();
10125 TCGv_i64 fp1
= tcg_temp_new_i64();
10127 gen_load_fpr64(ctx
, fp0
, fs
);
10128 gen_load_fpr64(ctx
, fp1
, ft
);
10129 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10130 tcg_temp_free_i64(fp1
);
10131 gen_store_fpr64(ctx
, fp0
, fd
);
10132 tcg_temp_free_i64(fp0
);
10138 TCGv_i64 fp0
= tcg_temp_new_i64();
10140 gen_load_fpr64(ctx
, fp0
, fs
);
10141 gen_helper_float_abs_ps(fp0
, fp0
);
10142 gen_store_fpr64(ctx
, fp0
, fd
);
10143 tcg_temp_free_i64(fp0
);
10149 TCGv_i64 fp0
= tcg_temp_new_i64();
10151 gen_load_fpr64(ctx
, fp0
, fs
);
10152 gen_store_fpr64(ctx
, fp0
, fd
);
10153 tcg_temp_free_i64(fp0
);
10159 TCGv_i64 fp0
= tcg_temp_new_i64();
10161 gen_load_fpr64(ctx
, fp0
, fs
);
10162 gen_helper_float_chs_ps(fp0
, fp0
);
10163 gen_store_fpr64(ctx
, fp0
, fd
);
10164 tcg_temp_free_i64(fp0
);
10169 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10174 TCGLabel
*l1
= gen_new_label();
10178 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10179 fp0
= tcg_temp_new_i64();
10180 gen_load_fpr64(ctx
, fp0
, fs
);
10181 gen_store_fpr64(ctx
, fp0
, fd
);
10182 tcg_temp_free_i64(fp0
);
10189 TCGLabel
*l1
= gen_new_label();
10193 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10194 fp0
= tcg_temp_new_i64();
10195 gen_load_fpr64(ctx
, fp0
, fs
);
10196 gen_store_fpr64(ctx
, fp0
, fd
);
10197 tcg_temp_free_i64(fp0
);
10205 TCGv_i64 fp0
= tcg_temp_new_i64();
10206 TCGv_i64 fp1
= tcg_temp_new_i64();
10208 gen_load_fpr64(ctx
, fp0
, ft
);
10209 gen_load_fpr64(ctx
, fp1
, fs
);
10210 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10211 tcg_temp_free_i64(fp1
);
10212 gen_store_fpr64(ctx
, fp0
, fd
);
10213 tcg_temp_free_i64(fp0
);
10219 TCGv_i64 fp0
= tcg_temp_new_i64();
10220 TCGv_i64 fp1
= tcg_temp_new_i64();
10222 gen_load_fpr64(ctx
, fp0
, ft
);
10223 gen_load_fpr64(ctx
, fp1
, fs
);
10224 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10225 tcg_temp_free_i64(fp1
);
10226 gen_store_fpr64(ctx
, fp0
, fd
);
10227 tcg_temp_free_i64(fp0
);
10230 case OPC_RECIP2_PS
:
10233 TCGv_i64 fp0
= tcg_temp_new_i64();
10234 TCGv_i64 fp1
= tcg_temp_new_i64();
10236 gen_load_fpr64(ctx
, fp0
, fs
);
10237 gen_load_fpr64(ctx
, fp1
, ft
);
10238 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10239 tcg_temp_free_i64(fp1
);
10240 gen_store_fpr64(ctx
, fp0
, fd
);
10241 tcg_temp_free_i64(fp0
);
10244 case OPC_RECIP1_PS
:
10247 TCGv_i64 fp0
= tcg_temp_new_i64();
10249 gen_load_fpr64(ctx
, fp0
, fs
);
10250 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10251 gen_store_fpr64(ctx
, fp0
, fd
);
10252 tcg_temp_free_i64(fp0
);
10255 case OPC_RSQRT1_PS
:
10258 TCGv_i64 fp0
= tcg_temp_new_i64();
10260 gen_load_fpr64(ctx
, fp0
, fs
);
10261 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10262 gen_store_fpr64(ctx
, fp0
, fd
);
10263 tcg_temp_free_i64(fp0
);
10266 case OPC_RSQRT2_PS
:
10269 TCGv_i64 fp0
= tcg_temp_new_i64();
10270 TCGv_i64 fp1
= tcg_temp_new_i64();
10272 gen_load_fpr64(ctx
, fp0
, fs
);
10273 gen_load_fpr64(ctx
, fp1
, ft
);
10274 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10275 tcg_temp_free_i64(fp1
);
10276 gen_store_fpr64(ctx
, fp0
, fd
);
10277 tcg_temp_free_i64(fp0
);
10281 check_cp1_64bitmode(ctx
);
10283 TCGv_i32 fp0
= tcg_temp_new_i32();
10285 gen_load_fpr32h(ctx
, fp0
, fs
);
10286 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10287 gen_store_fpr32(ctx
, fp0
, fd
);
10288 tcg_temp_free_i32(fp0
);
10291 case OPC_CVT_PW_PS
:
10294 TCGv_i64 fp0
= tcg_temp_new_i64();
10296 gen_load_fpr64(ctx
, fp0
, fs
);
10297 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10298 gen_store_fpr64(ctx
, fp0
, fd
);
10299 tcg_temp_free_i64(fp0
);
10303 check_cp1_64bitmode(ctx
);
10305 TCGv_i32 fp0
= tcg_temp_new_i32();
10307 gen_load_fpr32(ctx
, fp0
, fs
);
10308 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10309 gen_store_fpr32(ctx
, fp0
, fd
);
10310 tcg_temp_free_i32(fp0
);
10316 TCGv_i32 fp0
= tcg_temp_new_i32();
10317 TCGv_i32 fp1
= tcg_temp_new_i32();
10319 gen_load_fpr32(ctx
, fp0
, fs
);
10320 gen_load_fpr32(ctx
, fp1
, ft
);
10321 gen_store_fpr32h(ctx
, fp0
, fd
);
10322 gen_store_fpr32(ctx
, fp1
, fd
);
10323 tcg_temp_free_i32(fp0
);
10324 tcg_temp_free_i32(fp1
);
10330 TCGv_i32 fp0
= tcg_temp_new_i32();
10331 TCGv_i32 fp1
= tcg_temp_new_i32();
10333 gen_load_fpr32(ctx
, fp0
, fs
);
10334 gen_load_fpr32h(ctx
, fp1
, ft
);
10335 gen_store_fpr32(ctx
, fp1
, fd
);
10336 gen_store_fpr32h(ctx
, fp0
, fd
);
10337 tcg_temp_free_i32(fp0
);
10338 tcg_temp_free_i32(fp1
);
10344 TCGv_i32 fp0
= tcg_temp_new_i32();
10345 TCGv_i32 fp1
= tcg_temp_new_i32();
10347 gen_load_fpr32h(ctx
, fp0
, fs
);
10348 gen_load_fpr32(ctx
, fp1
, ft
);
10349 gen_store_fpr32(ctx
, fp1
, fd
);
10350 gen_store_fpr32h(ctx
, fp0
, fd
);
10351 tcg_temp_free_i32(fp0
);
10352 tcg_temp_free_i32(fp1
);
10358 TCGv_i32 fp0
= tcg_temp_new_i32();
10359 TCGv_i32 fp1
= tcg_temp_new_i32();
10361 gen_load_fpr32h(ctx
, fp0
, fs
);
10362 gen_load_fpr32h(ctx
, fp1
, ft
);
10363 gen_store_fpr32(ctx
, fp1
, fd
);
10364 gen_store_fpr32h(ctx
, fp0
, fd
);
10365 tcg_temp_free_i32(fp0
);
10366 tcg_temp_free_i32(fp1
);
10370 case OPC_CMP_UN_PS
:
10371 case OPC_CMP_EQ_PS
:
10372 case OPC_CMP_UEQ_PS
:
10373 case OPC_CMP_OLT_PS
:
10374 case OPC_CMP_ULT_PS
:
10375 case OPC_CMP_OLE_PS
:
10376 case OPC_CMP_ULE_PS
:
10377 case OPC_CMP_SF_PS
:
10378 case OPC_CMP_NGLE_PS
:
10379 case OPC_CMP_SEQ_PS
:
10380 case OPC_CMP_NGL_PS
:
10381 case OPC_CMP_LT_PS
:
10382 case OPC_CMP_NGE_PS
:
10383 case OPC_CMP_LE_PS
:
10384 case OPC_CMP_NGT_PS
:
10385 if (ctx
->opcode
& (1 << 6)) {
10386 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10388 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10392 MIPS_INVAL("farith");
10393 generate_exception_end(ctx
, EXCP_RI
);
10398 /* Coprocessor 3 (FPU) */
10399 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10400 int fd
, int fs
, int base
, int index
)
10402 TCGv t0
= tcg_temp_new();
10405 gen_load_gpr(t0
, index
);
10406 } else if (index
== 0) {
10407 gen_load_gpr(t0
, base
);
10409 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10411 /* Don't do NOP if destination is zero: we must perform the actual
10417 TCGv_i32 fp0
= tcg_temp_new_i32();
10419 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10420 tcg_gen_trunc_tl_i32(fp0
, t0
);
10421 gen_store_fpr32(ctx
, fp0
, fd
);
10422 tcg_temp_free_i32(fp0
);
10427 check_cp1_registers(ctx
, fd
);
10429 TCGv_i64 fp0
= tcg_temp_new_i64();
10430 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10431 gen_store_fpr64(ctx
, fp0
, fd
);
10432 tcg_temp_free_i64(fp0
);
10436 check_cp1_64bitmode(ctx
);
10437 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10439 TCGv_i64 fp0
= tcg_temp_new_i64();
10441 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10442 gen_store_fpr64(ctx
, fp0
, fd
);
10443 tcg_temp_free_i64(fp0
);
10449 TCGv_i32 fp0
= tcg_temp_new_i32();
10450 gen_load_fpr32(ctx
, fp0
, fs
);
10451 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10452 tcg_temp_free_i32(fp0
);
10457 check_cp1_registers(ctx
, fs
);
10459 TCGv_i64 fp0
= tcg_temp_new_i64();
10460 gen_load_fpr64(ctx
, fp0
, fs
);
10461 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10462 tcg_temp_free_i64(fp0
);
10466 check_cp1_64bitmode(ctx
);
10467 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10469 TCGv_i64 fp0
= tcg_temp_new_i64();
10470 gen_load_fpr64(ctx
, fp0
, fs
);
10471 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10472 tcg_temp_free_i64(fp0
);
10479 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10480 int fd
, int fr
, int fs
, int ft
)
10486 TCGv t0
= tcg_temp_local_new();
10487 TCGv_i32 fp
= tcg_temp_new_i32();
10488 TCGv_i32 fph
= tcg_temp_new_i32();
10489 TCGLabel
*l1
= gen_new_label();
10490 TCGLabel
*l2
= gen_new_label();
10492 gen_load_gpr(t0
, fr
);
10493 tcg_gen_andi_tl(t0
, t0
, 0x7);
10495 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10496 gen_load_fpr32(ctx
, fp
, fs
);
10497 gen_load_fpr32h(ctx
, fph
, fs
);
10498 gen_store_fpr32(ctx
, fp
, fd
);
10499 gen_store_fpr32h(ctx
, fph
, fd
);
10502 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10504 #ifdef TARGET_WORDS_BIGENDIAN
10505 gen_load_fpr32(ctx
, fp
, fs
);
10506 gen_load_fpr32h(ctx
, fph
, ft
);
10507 gen_store_fpr32h(ctx
, fp
, fd
);
10508 gen_store_fpr32(ctx
, fph
, fd
);
10510 gen_load_fpr32h(ctx
, fph
, fs
);
10511 gen_load_fpr32(ctx
, fp
, ft
);
10512 gen_store_fpr32(ctx
, fph
, fd
);
10513 gen_store_fpr32h(ctx
, fp
, fd
);
10516 tcg_temp_free_i32(fp
);
10517 tcg_temp_free_i32(fph
);
10523 TCGv_i32 fp0
= tcg_temp_new_i32();
10524 TCGv_i32 fp1
= tcg_temp_new_i32();
10525 TCGv_i32 fp2
= tcg_temp_new_i32();
10527 gen_load_fpr32(ctx
, fp0
, fs
);
10528 gen_load_fpr32(ctx
, fp1
, ft
);
10529 gen_load_fpr32(ctx
, fp2
, fr
);
10530 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10531 tcg_temp_free_i32(fp0
);
10532 tcg_temp_free_i32(fp1
);
10533 gen_store_fpr32(ctx
, fp2
, fd
);
10534 tcg_temp_free_i32(fp2
);
10539 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10541 TCGv_i64 fp0
= tcg_temp_new_i64();
10542 TCGv_i64 fp1
= tcg_temp_new_i64();
10543 TCGv_i64 fp2
= tcg_temp_new_i64();
10545 gen_load_fpr64(ctx
, fp0
, fs
);
10546 gen_load_fpr64(ctx
, fp1
, ft
);
10547 gen_load_fpr64(ctx
, fp2
, fr
);
10548 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10549 tcg_temp_free_i64(fp0
);
10550 tcg_temp_free_i64(fp1
);
10551 gen_store_fpr64(ctx
, fp2
, fd
);
10552 tcg_temp_free_i64(fp2
);
10558 TCGv_i64 fp0
= tcg_temp_new_i64();
10559 TCGv_i64 fp1
= tcg_temp_new_i64();
10560 TCGv_i64 fp2
= tcg_temp_new_i64();
10562 gen_load_fpr64(ctx
, fp0
, fs
);
10563 gen_load_fpr64(ctx
, fp1
, ft
);
10564 gen_load_fpr64(ctx
, fp2
, fr
);
10565 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10566 tcg_temp_free_i64(fp0
);
10567 tcg_temp_free_i64(fp1
);
10568 gen_store_fpr64(ctx
, fp2
, fd
);
10569 tcg_temp_free_i64(fp2
);
10575 TCGv_i32 fp0
= tcg_temp_new_i32();
10576 TCGv_i32 fp1
= tcg_temp_new_i32();
10577 TCGv_i32 fp2
= tcg_temp_new_i32();
10579 gen_load_fpr32(ctx
, fp0
, fs
);
10580 gen_load_fpr32(ctx
, fp1
, ft
);
10581 gen_load_fpr32(ctx
, fp2
, fr
);
10582 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10583 tcg_temp_free_i32(fp0
);
10584 tcg_temp_free_i32(fp1
);
10585 gen_store_fpr32(ctx
, fp2
, fd
);
10586 tcg_temp_free_i32(fp2
);
10591 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10593 TCGv_i64 fp0
= tcg_temp_new_i64();
10594 TCGv_i64 fp1
= tcg_temp_new_i64();
10595 TCGv_i64 fp2
= tcg_temp_new_i64();
10597 gen_load_fpr64(ctx
, fp0
, fs
);
10598 gen_load_fpr64(ctx
, fp1
, ft
);
10599 gen_load_fpr64(ctx
, fp2
, fr
);
10600 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10601 tcg_temp_free_i64(fp0
);
10602 tcg_temp_free_i64(fp1
);
10603 gen_store_fpr64(ctx
, fp2
, fd
);
10604 tcg_temp_free_i64(fp2
);
10610 TCGv_i64 fp0
= tcg_temp_new_i64();
10611 TCGv_i64 fp1
= tcg_temp_new_i64();
10612 TCGv_i64 fp2
= tcg_temp_new_i64();
10614 gen_load_fpr64(ctx
, fp0
, fs
);
10615 gen_load_fpr64(ctx
, fp1
, ft
);
10616 gen_load_fpr64(ctx
, fp2
, fr
);
10617 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10618 tcg_temp_free_i64(fp0
);
10619 tcg_temp_free_i64(fp1
);
10620 gen_store_fpr64(ctx
, fp2
, fd
);
10621 tcg_temp_free_i64(fp2
);
10627 TCGv_i32 fp0
= tcg_temp_new_i32();
10628 TCGv_i32 fp1
= tcg_temp_new_i32();
10629 TCGv_i32 fp2
= tcg_temp_new_i32();
10631 gen_load_fpr32(ctx
, fp0
, fs
);
10632 gen_load_fpr32(ctx
, fp1
, ft
);
10633 gen_load_fpr32(ctx
, fp2
, fr
);
10634 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10635 tcg_temp_free_i32(fp0
);
10636 tcg_temp_free_i32(fp1
);
10637 gen_store_fpr32(ctx
, fp2
, fd
);
10638 tcg_temp_free_i32(fp2
);
10643 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10645 TCGv_i64 fp0
= tcg_temp_new_i64();
10646 TCGv_i64 fp1
= tcg_temp_new_i64();
10647 TCGv_i64 fp2
= tcg_temp_new_i64();
10649 gen_load_fpr64(ctx
, fp0
, fs
);
10650 gen_load_fpr64(ctx
, fp1
, ft
);
10651 gen_load_fpr64(ctx
, fp2
, fr
);
10652 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10653 tcg_temp_free_i64(fp0
);
10654 tcg_temp_free_i64(fp1
);
10655 gen_store_fpr64(ctx
, fp2
, fd
);
10656 tcg_temp_free_i64(fp2
);
10662 TCGv_i64 fp0
= tcg_temp_new_i64();
10663 TCGv_i64 fp1
= tcg_temp_new_i64();
10664 TCGv_i64 fp2
= tcg_temp_new_i64();
10666 gen_load_fpr64(ctx
, fp0
, fs
);
10667 gen_load_fpr64(ctx
, fp1
, ft
);
10668 gen_load_fpr64(ctx
, fp2
, fr
);
10669 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10670 tcg_temp_free_i64(fp0
);
10671 tcg_temp_free_i64(fp1
);
10672 gen_store_fpr64(ctx
, fp2
, fd
);
10673 tcg_temp_free_i64(fp2
);
10679 TCGv_i32 fp0
= tcg_temp_new_i32();
10680 TCGv_i32 fp1
= tcg_temp_new_i32();
10681 TCGv_i32 fp2
= tcg_temp_new_i32();
10683 gen_load_fpr32(ctx
, fp0
, fs
);
10684 gen_load_fpr32(ctx
, fp1
, ft
);
10685 gen_load_fpr32(ctx
, fp2
, fr
);
10686 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10687 tcg_temp_free_i32(fp0
);
10688 tcg_temp_free_i32(fp1
);
10689 gen_store_fpr32(ctx
, fp2
, fd
);
10690 tcg_temp_free_i32(fp2
);
10695 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10697 TCGv_i64 fp0
= tcg_temp_new_i64();
10698 TCGv_i64 fp1
= tcg_temp_new_i64();
10699 TCGv_i64 fp2
= tcg_temp_new_i64();
10701 gen_load_fpr64(ctx
, fp0
, fs
);
10702 gen_load_fpr64(ctx
, fp1
, ft
);
10703 gen_load_fpr64(ctx
, fp2
, fr
);
10704 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10705 tcg_temp_free_i64(fp0
);
10706 tcg_temp_free_i64(fp1
);
10707 gen_store_fpr64(ctx
, fp2
, fd
);
10708 tcg_temp_free_i64(fp2
);
10714 TCGv_i64 fp0
= tcg_temp_new_i64();
10715 TCGv_i64 fp1
= tcg_temp_new_i64();
10716 TCGv_i64 fp2
= tcg_temp_new_i64();
10718 gen_load_fpr64(ctx
, fp0
, fs
);
10719 gen_load_fpr64(ctx
, fp1
, ft
);
10720 gen_load_fpr64(ctx
, fp2
, fr
);
10721 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10722 tcg_temp_free_i64(fp0
);
10723 tcg_temp_free_i64(fp1
);
10724 gen_store_fpr64(ctx
, fp2
, fd
);
10725 tcg_temp_free_i64(fp2
);
10729 MIPS_INVAL("flt3_arith");
10730 generate_exception_end(ctx
, EXCP_RI
);
10735 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10739 #if !defined(CONFIG_USER_ONLY)
10740 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10741 Therefore only check the ISA in system mode. */
10742 check_insn(ctx
, ISA_MIPS32R2
);
10744 t0
= tcg_temp_new();
10748 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10749 gen_store_gpr(t0
, rt
);
10752 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10753 gen_store_gpr(t0
, rt
);
10756 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10759 gen_helper_rdhwr_cc(t0
, cpu_env
);
10760 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10763 gen_store_gpr(t0
, rt
);
10764 /* Break the TB to be able to take timer interrupts immediately
10765 after reading count. DISAS_STOP isn't sufficient, we need to ensure
10766 we break completely out of translated code. */
10767 gen_save_pc(ctx
->base
.pc_next
+ 4);
10768 ctx
->base
.is_jmp
= DISAS_EXIT
;
10771 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10772 gen_store_gpr(t0
, rt
);
10775 check_insn(ctx
, ISA_MIPS32R6
);
10777 /* Performance counter registers are not implemented other than
10778 * control register 0.
10780 generate_exception(ctx
, EXCP_RI
);
10782 gen_helper_rdhwr_performance(t0
, cpu_env
);
10783 gen_store_gpr(t0
, rt
);
10786 check_insn(ctx
, ISA_MIPS32R6
);
10787 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10788 gen_store_gpr(t0
, rt
);
10791 #if defined(CONFIG_USER_ONLY)
10792 tcg_gen_ld_tl(t0
, cpu_env
,
10793 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10794 gen_store_gpr(t0
, rt
);
10797 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10798 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10799 tcg_gen_ld_tl(t0
, cpu_env
,
10800 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10801 gen_store_gpr(t0
, rt
);
10803 generate_exception_end(ctx
, EXCP_RI
);
10807 default: /* Invalid */
10808 MIPS_INVAL("rdhwr");
10809 generate_exception_end(ctx
, EXCP_RI
);
10815 static inline void clear_branch_hflags(DisasContext
*ctx
)
10817 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10818 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
10819 save_cpu_state(ctx
, 0);
10821 /* it is not safe to save ctx->hflags as hflags may be changed
10822 in execution time by the instruction in delay / forbidden slot. */
10823 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10827 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10829 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10830 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10831 /* Branches completion */
10832 clear_branch_hflags(ctx
);
10833 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10834 /* FIXME: Need to clear can_do_io. */
10835 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10836 case MIPS_HFLAG_FBNSLOT
:
10837 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
10840 /* unconditional branch */
10841 if (proc_hflags
& MIPS_HFLAG_BX
) {
10842 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10844 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10846 case MIPS_HFLAG_BL
:
10847 /* blikely taken case */
10848 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10850 case MIPS_HFLAG_BC
:
10851 /* Conditional branch */
10853 TCGLabel
*l1
= gen_new_label();
10855 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10856 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
10858 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10861 case MIPS_HFLAG_BR
:
10862 /* unconditional branch to register */
10863 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10864 TCGv t0
= tcg_temp_new();
10865 TCGv_i32 t1
= tcg_temp_new_i32();
10867 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10868 tcg_gen_trunc_tl_i32(t1
, t0
);
10870 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10871 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10872 tcg_gen_or_i32(hflags
, hflags
, t1
);
10873 tcg_temp_free_i32(t1
);
10875 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10877 tcg_gen_mov_tl(cpu_PC
, btarget
);
10879 if (ctx
->base
.singlestep_enabled
) {
10880 save_cpu_state(ctx
, 0);
10881 gen_helper_raise_exception_debug(cpu_env
);
10883 tcg_gen_lookup_and_goto_ptr();
10886 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10892 /* Compact Branches */
10893 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10894 int rs
, int rt
, int32_t offset
)
10896 int bcond_compute
= 0;
10897 TCGv t0
= tcg_temp_new();
10898 TCGv t1
= tcg_temp_new();
10899 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10901 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10902 #ifdef MIPS_DEBUG_DISAS
10903 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10904 "\n", ctx
->base
.pc_next
);
10906 generate_exception_end(ctx
, EXCP_RI
);
10910 /* Load needed operands and calculate btarget */
10912 /* compact branch */
10913 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10914 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10915 gen_load_gpr(t0
, rs
);
10916 gen_load_gpr(t1
, rt
);
10918 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10919 if (rs
<= rt
&& rs
== 0) {
10920 /* OPC_BEQZALC, OPC_BNEZALC */
10921 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10924 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10925 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10926 gen_load_gpr(t0
, rs
);
10927 gen_load_gpr(t1
, rt
);
10929 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10931 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10932 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10933 if (rs
== 0 || rs
== rt
) {
10934 /* OPC_BLEZALC, OPC_BGEZALC */
10935 /* OPC_BGTZALC, OPC_BLTZALC */
10936 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10938 gen_load_gpr(t0
, rs
);
10939 gen_load_gpr(t1
, rt
);
10941 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10945 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10950 /* OPC_BEQZC, OPC_BNEZC */
10951 gen_load_gpr(t0
, rs
);
10953 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10955 /* OPC_JIC, OPC_JIALC */
10956 TCGv tbase
= tcg_temp_new();
10957 TCGv toffset
= tcg_temp_new();
10959 gen_load_gpr(tbase
, rt
);
10960 tcg_gen_movi_tl(toffset
, offset
);
10961 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10962 tcg_temp_free(tbase
);
10963 tcg_temp_free(toffset
);
10967 MIPS_INVAL("Compact branch/jump");
10968 generate_exception_end(ctx
, EXCP_RI
);
10972 if (bcond_compute
== 0) {
10973 /* Uncoditional compact branch */
10976 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10979 ctx
->hflags
|= MIPS_HFLAG_BR
;
10982 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10985 ctx
->hflags
|= MIPS_HFLAG_B
;
10988 MIPS_INVAL("Compact branch/jump");
10989 generate_exception_end(ctx
, EXCP_RI
);
10993 /* Generating branch here as compact branches don't have delay slot */
10994 gen_branch(ctx
, 4);
10996 /* Conditional compact branch */
10997 TCGLabel
*fs
= gen_new_label();
10998 save_cpu_state(ctx
, 0);
11001 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11002 if (rs
== 0 && rt
!= 0) {
11004 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11005 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11007 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11010 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11013 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11014 if (rs
== 0 && rt
!= 0) {
11016 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11017 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11019 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11022 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11025 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11026 if (rs
== 0 && rt
!= 0) {
11028 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11029 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11031 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11034 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11037 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11038 if (rs
== 0 && rt
!= 0) {
11040 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11041 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11043 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11046 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11049 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11050 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11052 /* OPC_BOVC, OPC_BNVC */
11053 TCGv t2
= tcg_temp_new();
11054 TCGv t3
= tcg_temp_new();
11055 TCGv t4
= tcg_temp_new();
11056 TCGv input_overflow
= tcg_temp_new();
11058 gen_load_gpr(t0
, rs
);
11059 gen_load_gpr(t1
, rt
);
11060 tcg_gen_ext32s_tl(t2
, t0
);
11061 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11062 tcg_gen_ext32s_tl(t3
, t1
);
11063 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11064 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11066 tcg_gen_add_tl(t4
, t2
, t3
);
11067 tcg_gen_ext32s_tl(t4
, t4
);
11068 tcg_gen_xor_tl(t2
, t2
, t3
);
11069 tcg_gen_xor_tl(t3
, t4
, t3
);
11070 tcg_gen_andc_tl(t2
, t3
, t2
);
11071 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11072 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11073 if (opc
== OPC_BOVC
) {
11075 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11078 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11080 tcg_temp_free(input_overflow
);
11084 } else if (rs
< rt
&& rs
== 0) {
11085 /* OPC_BEQZALC, OPC_BNEZALC */
11086 if (opc
== OPC_BEQZALC
) {
11088 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11091 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11094 /* OPC_BEQC, OPC_BNEC */
11095 if (opc
== OPC_BEQC
) {
11097 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
11100 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
11105 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
11108 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
11111 MIPS_INVAL("Compact conditional branch/jump");
11112 generate_exception_end(ctx
, EXCP_RI
);
11116 /* Generating branch here as compact branches don't have delay slot */
11117 gen_goto_tb(ctx
, 1, ctx
->btarget
);
11120 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
11128 /* ISA extensions (ASEs) */
11129 /* MIPS16 extension to MIPS32 */
11131 /* MIPS16 major opcodes */
11133 M16_OPC_ADDIUSP
= 0x00,
11134 M16_OPC_ADDIUPC
= 0x01,
11136 M16_OPC_JAL
= 0x03,
11137 M16_OPC_BEQZ
= 0x04,
11138 M16_OPC_BNEQZ
= 0x05,
11139 M16_OPC_SHIFT
= 0x06,
11141 M16_OPC_RRIA
= 0x08,
11142 M16_OPC_ADDIU8
= 0x09,
11143 M16_OPC_SLTI
= 0x0a,
11144 M16_OPC_SLTIU
= 0x0b,
11147 M16_OPC_CMPI
= 0x0e,
11151 M16_OPC_LWSP
= 0x12,
11153 M16_OPC_LBU
= 0x14,
11154 M16_OPC_LHU
= 0x15,
11155 M16_OPC_LWPC
= 0x16,
11156 M16_OPC_LWU
= 0x17,
11159 M16_OPC_SWSP
= 0x1a,
11161 M16_OPC_RRR
= 0x1c,
11163 M16_OPC_EXTEND
= 0x1e,
11167 /* I8 funct field */
11186 /* RR funct field */
11220 /* I64 funct field */
11228 I64_DADDIUPC
= 0x6,
11232 /* RR ry field for CNVT */
11234 RR_RY_CNVT_ZEB
= 0x0,
11235 RR_RY_CNVT_ZEH
= 0x1,
11236 RR_RY_CNVT_ZEW
= 0x2,
11237 RR_RY_CNVT_SEB
= 0x4,
11238 RR_RY_CNVT_SEH
= 0x5,
11239 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field into the corresponding
 * full MIPS GPR number: fields 0 and 1 map to $16/$17 (s0/s1),
 * fields 2..7 map directly to $2..$7 (v0, v1, a0..a3).
 *
 * The caller guarantees 0 <= r <= 7 (the field is masked with 0x7
 * before every call site).
 */
static int xlat(int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
11249 static void gen_mips16_save (DisasContext
*ctx
,
11250 int xsregs
, int aregs
,
11251 int do_ra
, int do_s0
, int do_s1
,
11254 TCGv t0
= tcg_temp_new();
11255 TCGv t1
= tcg_temp_new();
11256 TCGv t2
= tcg_temp_new();
11286 generate_exception_end(ctx
, EXCP_RI
);
11292 gen_base_offset_addr(ctx
, t0
, 29, 12);
11293 gen_load_gpr(t1
, 7);
11294 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11297 gen_base_offset_addr(ctx
, t0
, 29, 8);
11298 gen_load_gpr(t1
, 6);
11299 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11302 gen_base_offset_addr(ctx
, t0
, 29, 4);
11303 gen_load_gpr(t1
, 5);
11304 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11307 gen_base_offset_addr(ctx
, t0
, 29, 0);
11308 gen_load_gpr(t1
, 4);
11309 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11312 gen_load_gpr(t0
, 29);
11314 #define DECR_AND_STORE(reg) do { \
11315 tcg_gen_movi_tl(t2, -4); \
11316 gen_op_addr_add(ctx, t0, t0, t2); \
11317 gen_load_gpr(t1, reg); \
11318 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11322 DECR_AND_STORE(31);
11327 DECR_AND_STORE(30);
11330 DECR_AND_STORE(23);
11333 DECR_AND_STORE(22);
11336 DECR_AND_STORE(21);
11339 DECR_AND_STORE(20);
11342 DECR_AND_STORE(19);
11345 DECR_AND_STORE(18);
11349 DECR_AND_STORE(17);
11352 DECR_AND_STORE(16);
11382 generate_exception_end(ctx
, EXCP_RI
);
11398 #undef DECR_AND_STORE
11400 tcg_gen_movi_tl(t2
, -framesize
);
11401 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11407 static void gen_mips16_restore (DisasContext
*ctx
,
11408 int xsregs
, int aregs
,
11409 int do_ra
, int do_s0
, int do_s1
,
11413 TCGv t0
= tcg_temp_new();
11414 TCGv t1
= tcg_temp_new();
11415 TCGv t2
= tcg_temp_new();
11417 tcg_gen_movi_tl(t2
, framesize
);
11418 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11420 #define DECR_AND_LOAD(reg) do { \
11421 tcg_gen_movi_tl(t2, -4); \
11422 gen_op_addr_add(ctx, t0, t0, t2); \
11423 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11424 gen_store_gpr(t1, reg); \
11488 generate_exception_end(ctx
, EXCP_RI
);
11504 #undef DECR_AND_LOAD
11506 tcg_gen_movi_tl(t2
, framesize
);
11507 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11513 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11514 int is_64_bit
, int extended
)
11518 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11519 generate_exception_end(ctx
, EXCP_RI
);
11523 t0
= tcg_temp_new();
11525 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11526 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11528 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11534 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11537 TCGv_i32 t0
= tcg_const_i32(op
);
11538 TCGv t1
= tcg_temp_new();
11539 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11540 gen_helper_cache(cpu_env
, t1
, t0
);
11543 #if defined(TARGET_MIPS64)
11544 static void decode_i64_mips16 (DisasContext
*ctx
,
11545 int ry
, int funct
, int16_t offset
,
11550 check_insn(ctx
, ISA_MIPS3
);
11551 check_mips_64(ctx
);
11552 offset
= extended
? offset
: offset
<< 3;
11553 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11556 check_insn(ctx
, ISA_MIPS3
);
11557 check_mips_64(ctx
);
11558 offset
= extended
? offset
: offset
<< 3;
11559 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11562 check_insn(ctx
, ISA_MIPS3
);
11563 check_mips_64(ctx
);
11564 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11565 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11568 check_insn(ctx
, ISA_MIPS3
);
11569 check_mips_64(ctx
);
11570 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11571 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11574 check_insn(ctx
, ISA_MIPS3
);
11575 check_mips_64(ctx
);
11576 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11577 generate_exception_end(ctx
, EXCP_RI
);
11579 offset
= extended
? offset
: offset
<< 3;
11580 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11584 check_insn(ctx
, ISA_MIPS3
);
11585 check_mips_64(ctx
);
11586 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11587 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11590 check_insn(ctx
, ISA_MIPS3
);
11591 check_mips_64(ctx
);
11592 offset
= extended
? offset
: offset
<< 2;
11593 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11596 check_insn(ctx
, ISA_MIPS3
);
11597 check_mips_64(ctx
);
11598 offset
= extended
? offset
: offset
<< 2;
11599 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11605 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11607 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11608 int op
, rx
, ry
, funct
, sa
;
11609 int16_t imm
, offset
;
11611 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11612 op
= (ctx
->opcode
>> 11) & 0x1f;
11613 sa
= (ctx
->opcode
>> 22) & 0x1f;
11614 funct
= (ctx
->opcode
>> 8) & 0x7;
11615 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11616 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11617 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11618 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11619 | (ctx
->opcode
& 0x1f));
11621 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11624 case M16_OPC_ADDIUSP
:
11625 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11627 case M16_OPC_ADDIUPC
:
11628 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11631 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11632 /* No delay slot, so just process as a normal instruction */
11635 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11636 /* No delay slot, so just process as a normal instruction */
11638 case M16_OPC_BNEQZ
:
11639 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11640 /* No delay slot, so just process as a normal instruction */
11642 case M16_OPC_SHIFT
:
11643 switch (ctx
->opcode
& 0x3) {
11645 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11648 #if defined(TARGET_MIPS64)
11649 check_mips_64(ctx
);
11650 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11652 generate_exception_end(ctx
, EXCP_RI
);
11656 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11659 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11663 #if defined(TARGET_MIPS64)
11665 check_insn(ctx
, ISA_MIPS3
);
11666 check_mips_64(ctx
);
11667 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11671 imm
= ctx
->opcode
& 0xf;
11672 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11673 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11674 imm
= (int16_t) (imm
<< 1) >> 1;
11675 if ((ctx
->opcode
>> 4) & 0x1) {
11676 #if defined(TARGET_MIPS64)
11677 check_mips_64(ctx
);
11678 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11680 generate_exception_end(ctx
, EXCP_RI
);
11683 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11686 case M16_OPC_ADDIU8
:
11687 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11690 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11692 case M16_OPC_SLTIU
:
11693 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11698 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11701 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11704 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11707 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11710 check_insn(ctx
, ISA_MIPS32
);
11712 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11713 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11714 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11715 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11716 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11717 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11718 | (ctx
->opcode
& 0xf)) << 3;
11720 if (ctx
->opcode
& (1 << 7)) {
11721 gen_mips16_save(ctx
, xsregs
, aregs
,
11722 do_ra
, do_s0
, do_s1
,
11725 gen_mips16_restore(ctx
, xsregs
, aregs
,
11726 do_ra
, do_s0
, do_s1
,
11732 generate_exception_end(ctx
, EXCP_RI
);
11737 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11740 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11742 #if defined(TARGET_MIPS64)
11744 check_insn(ctx
, ISA_MIPS3
);
11745 check_mips_64(ctx
);
11746 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11750 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11753 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11756 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11759 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11762 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11765 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11768 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11770 #if defined(TARGET_MIPS64)
11772 check_insn(ctx
, ISA_MIPS3
);
11773 check_mips_64(ctx
);
11774 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11778 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11781 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11784 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11787 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11789 #if defined(TARGET_MIPS64)
11791 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11795 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Decide whether an SDBBP instruction should be handled by the UHI
 * semihosting interface instead of raising a debug exception.
 * Only SDBBP with code field == 1 is a UHI call, and only when
 * semihosting was enabled on the command line.  In user-only mode
 * there is no semihosting backend, so never take this path.
 *
 * NOTE(review): the extraction dropped the CONFIG_USER_ONLY arm of
 * this function; the "return false;" branch is restored here.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11811 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11815 int op
, cnvt_op
, op1
, offset
;
11819 op
= (ctx
->opcode
>> 11) & 0x1f;
11820 sa
= (ctx
->opcode
>> 2) & 0x7;
11821 sa
= sa
== 0 ? 8 : sa
;
11822 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11823 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11824 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11825 op1
= offset
= ctx
->opcode
& 0x1f;
11830 case M16_OPC_ADDIUSP
:
11832 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11834 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11837 case M16_OPC_ADDIUPC
:
11838 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11841 offset
= (ctx
->opcode
& 0x7ff) << 1;
11842 offset
= (int16_t)(offset
<< 4) >> 4;
11843 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11844 /* No delay slot, so just process as a normal instruction */
11847 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11848 offset
= (((ctx
->opcode
& 0x1f) << 21)
11849 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11851 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11852 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11856 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11857 ((int8_t)ctx
->opcode
) << 1, 0);
11858 /* No delay slot, so just process as a normal instruction */
11860 case M16_OPC_BNEQZ
:
11861 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11862 ((int8_t)ctx
->opcode
) << 1, 0);
11863 /* No delay slot, so just process as a normal instruction */
11865 case M16_OPC_SHIFT
:
11866 switch (ctx
->opcode
& 0x3) {
11868 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11871 #if defined(TARGET_MIPS64)
11872 check_insn(ctx
, ISA_MIPS3
);
11873 check_mips_64(ctx
);
11874 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11876 generate_exception_end(ctx
, EXCP_RI
);
11880 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11883 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11887 #if defined(TARGET_MIPS64)
11889 check_insn(ctx
, ISA_MIPS3
);
11890 check_mips_64(ctx
);
11891 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11896 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11898 if ((ctx
->opcode
>> 4) & 1) {
11899 #if defined(TARGET_MIPS64)
11900 check_insn(ctx
, ISA_MIPS3
);
11901 check_mips_64(ctx
);
11902 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11904 generate_exception_end(ctx
, EXCP_RI
);
11907 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11911 case M16_OPC_ADDIU8
:
11913 int16_t imm
= (int8_t) ctx
->opcode
;
11915 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11920 int16_t imm
= (uint8_t) ctx
->opcode
;
11921 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11924 case M16_OPC_SLTIU
:
11926 int16_t imm
= (uint8_t) ctx
->opcode
;
11927 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11934 funct
= (ctx
->opcode
>> 8) & 0x7;
11937 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11938 ((int8_t)ctx
->opcode
) << 1, 0);
11941 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11942 ((int8_t)ctx
->opcode
) << 1, 0);
11945 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11948 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11949 ((int8_t)ctx
->opcode
) << 3);
11952 check_insn(ctx
, ISA_MIPS32
);
11954 int do_ra
= ctx
->opcode
& (1 << 6);
11955 int do_s0
= ctx
->opcode
& (1 << 5);
11956 int do_s1
= ctx
->opcode
& (1 << 4);
11957 int framesize
= ctx
->opcode
& 0xf;
11959 if (framesize
== 0) {
11962 framesize
= framesize
<< 3;
11965 if (ctx
->opcode
& (1 << 7)) {
11966 gen_mips16_save(ctx
, 0, 0,
11967 do_ra
, do_s0
, do_s1
, framesize
);
11969 gen_mips16_restore(ctx
, 0, 0,
11970 do_ra
, do_s0
, do_s1
, framesize
);
11976 int rz
= xlat(ctx
->opcode
& 0x7);
11978 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11979 ((ctx
->opcode
>> 5) & 0x7);
11980 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11984 reg32
= ctx
->opcode
& 0x1f;
11985 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11988 generate_exception_end(ctx
, EXCP_RI
);
11995 int16_t imm
= (uint8_t) ctx
->opcode
;
11997 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12002 int16_t imm
= (uint8_t) ctx
->opcode
;
12003 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12006 #if defined(TARGET_MIPS64)
12008 check_insn(ctx
, ISA_MIPS3
);
12009 check_mips_64(ctx
);
12010 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12014 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12017 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12020 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12023 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12026 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12029 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12032 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12034 #if defined (TARGET_MIPS64)
12036 check_insn(ctx
, ISA_MIPS3
);
12037 check_mips_64(ctx
);
12038 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12042 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12045 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12048 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12051 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12055 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12058 switch (ctx
->opcode
& 0x3) {
12060 mips32_op
= OPC_ADDU
;
12063 mips32_op
= OPC_SUBU
;
12065 #if defined(TARGET_MIPS64)
12067 mips32_op
= OPC_DADDU
;
12068 check_insn(ctx
, ISA_MIPS3
);
12069 check_mips_64(ctx
);
12072 mips32_op
= OPC_DSUBU
;
12073 check_insn(ctx
, ISA_MIPS3
);
12074 check_mips_64(ctx
);
12078 generate_exception_end(ctx
, EXCP_RI
);
12082 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12091 int nd
= (ctx
->opcode
>> 7) & 0x1;
12092 int link
= (ctx
->opcode
>> 6) & 0x1;
12093 int ra
= (ctx
->opcode
>> 5) & 0x1;
12096 check_insn(ctx
, ISA_MIPS32
);
12105 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12110 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12111 gen_helper_do_semihosting(cpu_env
);
12113 /* XXX: not clear which exception should be raised
12114 * when in debug mode...
12116 check_insn(ctx
, ISA_MIPS32
);
12117 generate_exception_end(ctx
, EXCP_DBp
);
12121 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12124 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12127 generate_exception_end(ctx
, EXCP_BREAK
);
12130 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12133 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12136 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12138 #if defined (TARGET_MIPS64)
12140 check_insn(ctx
, ISA_MIPS3
);
12141 check_mips_64(ctx
);
12142 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12146 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12149 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12152 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12155 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12158 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12161 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12164 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12167 check_insn(ctx
, ISA_MIPS32
);
12169 case RR_RY_CNVT_ZEB
:
12170 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12172 case RR_RY_CNVT_ZEH
:
12173 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12175 case RR_RY_CNVT_SEB
:
12176 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12178 case RR_RY_CNVT_SEH
:
12179 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12181 #if defined (TARGET_MIPS64)
12182 case RR_RY_CNVT_ZEW
:
12183 check_insn(ctx
, ISA_MIPS64
);
12184 check_mips_64(ctx
);
12185 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12187 case RR_RY_CNVT_SEW
:
12188 check_insn(ctx
, ISA_MIPS64
);
12189 check_mips_64(ctx
);
12190 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12194 generate_exception_end(ctx
, EXCP_RI
);
12199 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12201 #if defined (TARGET_MIPS64)
12203 check_insn(ctx
, ISA_MIPS3
);
12204 check_mips_64(ctx
);
12205 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12208 check_insn(ctx
, ISA_MIPS3
);
12209 check_mips_64(ctx
);
12210 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12213 check_insn(ctx
, ISA_MIPS3
);
12214 check_mips_64(ctx
);
12215 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12218 check_insn(ctx
, ISA_MIPS3
);
12219 check_mips_64(ctx
);
12220 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12224 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12227 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12230 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12233 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12235 #if defined (TARGET_MIPS64)
12237 check_insn(ctx
, ISA_MIPS3
);
12238 check_mips_64(ctx
);
12239 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12242 check_insn(ctx
, ISA_MIPS3
);
12243 check_mips_64(ctx
);
12244 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12247 check_insn(ctx
, ISA_MIPS3
);
12248 check_mips_64(ctx
);
12249 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12252 check_insn(ctx
, ISA_MIPS3
);
12253 check_mips_64(ctx
);
12254 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12258 generate_exception_end(ctx
, EXCP_RI
);
12262 case M16_OPC_EXTEND
:
12263 decode_extended_mips16_opc(env
, ctx
);
12266 #if defined(TARGET_MIPS64)
12268 funct
= (ctx
->opcode
>> 8) & 0x7;
12269 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12273 generate_exception_end(ctx
, EXCP_RI
);
12280 /* microMIPS extension to MIPS32/MIPS64 */
12283 * microMIPS32/microMIPS64 major opcodes
12285 * 1. MIPS Architecture for Programmers Volume II-B:
12286 * The microMIPS32 Instruction Set (Revision 3.05)
12288 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12290 * 2. MIPS Architecture For Programmers Volume II-A:
12291 * The MIPS64 Instruction Set (Revision 3.51)
12321 POOL32S
= 0x16, /* MIPS64 */
12322 DADDIU32
= 0x17, /* MIPS64 */
12351 /* 0x29 is reserved */
12364 /* 0x31 is reserved */
12377 SD32
= 0x36, /* MIPS64 */
12378 LD32
= 0x37, /* MIPS64 */
12380 /* 0x39 is reserved */
12396 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12406 /* POOL32A encoding of minor opcode field */
12409 /* These opcodes are distinguished only by bits 9..6; those bits are
12410 * what are recorded below. */
12447 /* The following can be distinguished by their lower 6 bits. */
12457 /* POOL32AXF encoding of minor opcode field extension */
12460 * 1. MIPS Architecture for Programmers Volume II-B:
12461 * The microMIPS32 Instruction Set (Revision 3.05)
12463 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12465 * 2. MIPS Architecture for Programmers VolumeIV-e:
12466 * The MIPS DSP Application-Specific Extension
12467 * to the microMIPS32 Architecture (Revision 2.34)
12469 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12484 /* begin of microMIPS32 DSP */
12486 /* bits 13..12 for 0x01 */
12492 /* bits 13..12 for 0x2a */
12498 /* bits 13..12 for 0x32 */
12502 /* end of microMIPS32 DSP */
12504 /* bits 15..12 for 0x2c */
12521 /* bits 15..12 for 0x34 */
12529 /* bits 15..12 for 0x3c */
12531 JR
= 0x0, /* alias */
12539 /* bits 15..12 for 0x05 */
12543 /* bits 15..12 for 0x0d */
12555 /* bits 15..12 for 0x15 */
12561 /* bits 15..12 for 0x1d */
12565 /* bits 15..12 for 0x2d */
12570 /* bits 15..12 for 0x35 */
12577 /* POOL32B encoding of minor opcode field (bits 15..12) */
12593 /* POOL32C encoding of minor opcode field (bits 15..12) */
12614 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
12627 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
12640 /* POOL32F encoding of minor opcode field (bits 5..0) */
12643 /* These are the bit 7..6 values */
12652 /* These are the bit 8..6 values */
12677 MOVZ_FMT_05
= 0x05,
12711 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12718 /* POOL32Fxf encoding of minor opcode extension field */
12756 /* POOL32I encoding of minor opcode field (bits 25..21) */
12786 /* These overlap and are distinguished by bit16 of the instruction */
12795 /* POOL16A encoding of minor opcode field */
12802 /* POOL16B encoding of minor opcode field */
12809 /* POOL16C encoding of minor opcode field */
12829 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12849 /* POOL16D encoding of minor opcode field */
12856 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register encoding onto the architectural
 * GPR number.  Encodings 0 and 1 select s0/s1 (16/17); encodings
 * 2..7 select v0/v1/a0..a3 (2..7).
 * (Extraction dropped the braces and return statement; restored.)
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12870 /* Used for 16-bit store instructions. */
/*
 * Same 3-bit register mapping as mmreg(), except encoding 0 selects
 * the zero register instead of s0 — used by 16-bit store encodings.
 * (Extraction dropped the braces and return statement; restored.)
 */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12878 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12879 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12880 #define uMIPS_RS2(op) uMIPS_RS(op)
12881 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12882 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12883 #define uMIPS_RS5(op) (op & 0x1f)
/* Signed immediate: extract a `width`-bit field starting at bit `start`
 * and sign-extend it to 32 bits via the shift-left/arithmetic-shift-right
 * idiom.  (The trailing continuation lines of this macro were lost in
 * extraction and are restored here.) */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))

/* Zero-extended immediate: extract a `width`-bit field at bit `start`. */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12893 static void gen_addiur1sp(DisasContext
*ctx
)
12895 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12897 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12900 static void gen_addiur2(DisasContext
*ctx
)
12902 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12903 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12904 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12906 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12909 static void gen_addiusp(DisasContext
*ctx
)
12911 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12914 if (encoded
<= 1) {
12915 decoded
= 256 + encoded
;
12916 } else if (encoded
<= 255) {
12918 } else if (encoded
<= 509) {
12919 decoded
= encoded
- 512;
12921 decoded
= encoded
- 768;
12924 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12927 static void gen_addius5(DisasContext
*ctx
)
12929 int imm
= SIMM(ctx
->opcode
, 1, 4);
12930 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12932 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12935 static void gen_andi16(DisasContext
*ctx
)
12937 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12938 31, 32, 63, 64, 255, 32768, 65535 };
12939 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12940 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12941 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12943 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12946 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12947 int base
, int16_t offset
)
12952 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12953 generate_exception_end(ctx
, EXCP_RI
);
12957 t0
= tcg_temp_new();
12959 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12961 t1
= tcg_const_tl(reglist
);
12962 t2
= tcg_const_i32(ctx
->mem_idx
);
12964 save_cpu_state(ctx
, 1);
12967 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12970 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12972 #ifdef TARGET_MIPS64
12974 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12977 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12983 tcg_temp_free_i32(t2
);
12987 static void gen_pool16c_insn(DisasContext
*ctx
)
12989 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12990 int rs
= mmreg(ctx
->opcode
& 0x7);
12992 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12997 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13003 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13009 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13015 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13022 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13023 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13025 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13034 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13035 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13037 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13044 int reg
= ctx
->opcode
& 0x1f;
13046 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13052 int reg
= ctx
->opcode
& 0x1f;
13053 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13054 /* Let normal delay slot handling in our caller take us
13055 to the branch target. */
13060 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13061 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13065 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13066 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13070 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13074 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13077 generate_exception_end(ctx
, EXCP_BREAK
);
13080 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13081 gen_helper_do_semihosting(cpu_env
);
13083 /* XXX: not clear which exception should be raised
13084 * when in debug mode...
13086 check_insn(ctx
, ISA_MIPS32
);
13087 generate_exception_end(ctx
, EXCP_DBp
);
13090 case JRADDIUSP
+ 0:
13091 case JRADDIUSP
+ 1:
13093 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13094 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13095 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13096 /* Let normal delay slot handling in our caller take us
13097 to the branch target. */
13101 generate_exception_end(ctx
, EXCP_RI
);
13106 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
13109 int rd
, rs
, re
, rt
;
13110 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
13111 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
13112 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
13113 rd
= rd_enc
[enc_dest
];
13114 re
= re_enc
[enc_dest
];
13115 rs
= rs_rt_enc
[enc_rs
];
13116 rt
= rs_rt_enc
[enc_rt
];
13118 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
13120 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
13123 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
13125 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
13129 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13131 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13132 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13134 switch (ctx
->opcode
& 0xf) {
13136 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13139 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13143 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13144 int offset
= extract32(ctx
->opcode
, 4, 4);
13145 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13148 case R6_JRC16
: /* JRCADDIUSP */
13149 if ((ctx
->opcode
>> 4) & 1) {
13151 int imm
= extract32(ctx
->opcode
, 5, 5);
13152 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13153 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13156 int rs
= extract32(ctx
->opcode
, 5, 5);
13157 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13160 case MOVEP
... MOVEP_07
:
13161 case MOVEP_0C
... MOVEP_0F
:
13163 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13164 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13165 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13166 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13170 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13173 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13177 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13178 int offset
= extract32(ctx
->opcode
, 4, 4);
13179 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13182 case JALRC16
: /* BREAK16, SDBBP16 */
13183 switch (ctx
->opcode
& 0x3f) {
13185 case JALRC16
+ 0x20:
13187 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13192 generate_exception(ctx
, EXCP_BREAK
);
13196 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13197 gen_helper_do_semihosting(cpu_env
);
13199 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13200 generate_exception(ctx
, EXCP_RI
);
13202 generate_exception(ctx
, EXCP_DBp
);
13209 generate_exception(ctx
, EXCP_RI
);
13214 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13216 TCGv t0
= tcg_temp_new();
13217 TCGv t1
= tcg_temp_new();
13219 gen_load_gpr(t0
, base
);
13222 gen_load_gpr(t1
, index
);
13223 tcg_gen_shli_tl(t1
, t1
, 2);
13224 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13227 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13228 gen_store_gpr(t1
, rd
);
13234 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13235 int base
, int16_t offset
)
13239 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13240 generate_exception_end(ctx
, EXCP_RI
);
13244 t0
= tcg_temp_new();
13245 t1
= tcg_temp_new();
13247 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13252 generate_exception_end(ctx
, EXCP_RI
);
13255 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13256 gen_store_gpr(t1
, rd
);
13257 tcg_gen_movi_tl(t1
, 4);
13258 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13259 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13260 gen_store_gpr(t1
, rd
+1);
13263 gen_load_gpr(t1
, rd
);
13264 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13265 tcg_gen_movi_tl(t1
, 4);
13266 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13267 gen_load_gpr(t1
, rd
+1);
13268 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13270 #ifdef TARGET_MIPS64
13273 generate_exception_end(ctx
, EXCP_RI
);
13276 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13277 gen_store_gpr(t1
, rd
);
13278 tcg_gen_movi_tl(t1
, 8);
13279 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13280 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13281 gen_store_gpr(t1
, rd
+1);
13284 gen_load_gpr(t1
, rd
);
13285 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13286 tcg_gen_movi_tl(t1
, 8);
13287 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13288 gen_load_gpr(t1
, rd
+1);
13289 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13297 static void gen_sync(int stype
)
13299 TCGBar tcg_mo
= TCG_BAR_SC
;
13302 case 0x4: /* SYNC_WMB */
13303 tcg_mo
|= TCG_MO_ST_ST
;
13305 case 0x10: /* SYNC_MB */
13306 tcg_mo
|= TCG_MO_ALL
;
13308 case 0x11: /* SYNC_ACQUIRE */
13309 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13311 case 0x12: /* SYNC_RELEASE */
13312 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13314 case 0x13: /* SYNC_RMB */
13315 tcg_mo
|= TCG_MO_LD_LD
;
13318 tcg_mo
|= TCG_MO_ALL
;
13322 tcg_gen_mb(tcg_mo
);
13325 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13327 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13328 int minor
= (ctx
->opcode
>> 12) & 0xf;
13329 uint32_t mips32_op
;
13331 switch (extension
) {
13333 mips32_op
= OPC_TEQ
;
13336 mips32_op
= OPC_TGE
;
13339 mips32_op
= OPC_TGEU
;
13342 mips32_op
= OPC_TLT
;
13345 mips32_op
= OPC_TLTU
;
13348 mips32_op
= OPC_TNE
;
13350 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13352 #ifndef CONFIG_USER_ONLY
13355 check_cp0_enabled(ctx
);
13357 /* Treat as NOP. */
13360 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13364 check_cp0_enabled(ctx
);
13366 TCGv t0
= tcg_temp_new();
13368 gen_load_gpr(t0
, rt
);
13369 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13375 switch (minor
& 3) {
13377 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13380 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13383 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13386 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13389 goto pool32axf_invalid
;
13393 switch (minor
& 3) {
13395 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13398 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13401 goto pool32axf_invalid
;
13407 check_insn(ctx
, ISA_MIPS32R6
);
13408 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13411 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13414 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13417 mips32_op
= OPC_CLO
;
13420 mips32_op
= OPC_CLZ
;
13422 check_insn(ctx
, ISA_MIPS32
);
13423 gen_cl(ctx
, mips32_op
, rt
, rs
);
13426 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13427 gen_rdhwr(ctx
, rt
, rs
, 0);
13430 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13433 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13434 mips32_op
= OPC_MULT
;
13437 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13438 mips32_op
= OPC_MULTU
;
13441 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13442 mips32_op
= OPC_DIV
;
13445 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13446 mips32_op
= OPC_DIVU
;
13449 check_insn(ctx
, ISA_MIPS32
);
13450 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13453 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13454 mips32_op
= OPC_MADD
;
13457 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13458 mips32_op
= OPC_MADDU
;
13461 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13462 mips32_op
= OPC_MSUB
;
13465 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13466 mips32_op
= OPC_MSUBU
;
13468 check_insn(ctx
, ISA_MIPS32
);
13469 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13472 goto pool32axf_invalid
;
13483 generate_exception_err(ctx
, EXCP_CpU
, 2);
13486 goto pool32axf_invalid
;
13491 case JALR
: /* JALRC */
13492 case JALR_HB
: /* JALRC_HB */
13493 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13494 /* JALRC, JALRC_HB */
13495 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13497 /* JALR, JALR_HB */
13498 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13499 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13504 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13505 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13506 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13509 goto pool32axf_invalid
;
13515 check_cp0_enabled(ctx
);
13516 check_insn(ctx
, ISA_MIPS32R2
);
13517 gen_load_srsgpr(rs
, rt
);
13520 check_cp0_enabled(ctx
);
13521 check_insn(ctx
, ISA_MIPS32R2
);
13522 gen_store_srsgpr(rs
, rt
);
13525 goto pool32axf_invalid
;
13528 #ifndef CONFIG_USER_ONLY
13532 mips32_op
= OPC_TLBP
;
13535 mips32_op
= OPC_TLBR
;
13538 mips32_op
= OPC_TLBWI
;
13541 mips32_op
= OPC_TLBWR
;
13544 mips32_op
= OPC_TLBINV
;
13547 mips32_op
= OPC_TLBINVF
;
13550 mips32_op
= OPC_WAIT
;
13553 mips32_op
= OPC_DERET
;
13556 mips32_op
= OPC_ERET
;
13558 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13561 goto pool32axf_invalid
;
13567 check_cp0_enabled(ctx
);
13569 TCGv t0
= tcg_temp_new();
13571 save_cpu_state(ctx
, 1);
13572 gen_helper_di(t0
, cpu_env
);
13573 gen_store_gpr(t0
, rs
);
13574 /* Stop translation as we may have switched the execution mode */
13575 ctx
->base
.is_jmp
= DISAS_STOP
;
13580 check_cp0_enabled(ctx
);
13582 TCGv t0
= tcg_temp_new();
13584 save_cpu_state(ctx
, 1);
13585 gen_helper_ei(t0
, cpu_env
);
13586 gen_store_gpr(t0
, rs
);
13587 /* DISAS_STOP isn't sufficient, we need to ensure we break out
13588 of translated code to check for pending interrupts. */
13589 gen_save_pc(ctx
->base
.pc_next
+ 4);
13590 ctx
->base
.is_jmp
= DISAS_EXIT
;
13595 goto pool32axf_invalid
;
13602 gen_sync(extract32(ctx
->opcode
, 16, 5));
13605 generate_exception_end(ctx
, EXCP_SYSCALL
);
13608 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13609 gen_helper_do_semihosting(cpu_env
);
13611 check_insn(ctx
, ISA_MIPS32
);
13612 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13613 generate_exception_end(ctx
, EXCP_RI
);
13615 generate_exception_end(ctx
, EXCP_DBp
);
13620 goto pool32axf_invalid
;
13624 switch (minor
& 3) {
13626 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13629 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13632 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13635 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13638 goto pool32axf_invalid
;
13642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13645 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13648 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13651 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13654 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13657 goto pool32axf_invalid
;
13662 MIPS_INVAL("pool32axf");
13663 generate_exception_end(ctx
, EXCP_RI
);
13668 /* Values for microMIPS fmt field. Variable-width, depending on which
13669 formats the instruction supports. */
13688 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13690 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13691 uint32_t mips32_op
;
13693 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13694 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13695 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13697 switch (extension
) {
13698 case FLOAT_1BIT_FMT(CFC1
, 0):
13699 mips32_op
= OPC_CFC1
;
13701 case FLOAT_1BIT_FMT(CTC1
, 0):
13702 mips32_op
= OPC_CTC1
;
13704 case FLOAT_1BIT_FMT(MFC1
, 0):
13705 mips32_op
= OPC_MFC1
;
13707 case FLOAT_1BIT_FMT(MTC1
, 0):
13708 mips32_op
= OPC_MTC1
;
13710 case FLOAT_1BIT_FMT(MFHC1
, 0):
13711 mips32_op
= OPC_MFHC1
;
13713 case FLOAT_1BIT_FMT(MTHC1
, 0):
13714 mips32_op
= OPC_MTHC1
;
13716 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13719 /* Reciprocal square root */
13720 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13721 mips32_op
= OPC_RSQRT_S
;
13723 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13724 mips32_op
= OPC_RSQRT_D
;
13728 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13729 mips32_op
= OPC_SQRT_S
;
13731 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13732 mips32_op
= OPC_SQRT_D
;
13736 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13737 mips32_op
= OPC_RECIP_S
;
13739 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13740 mips32_op
= OPC_RECIP_D
;
13744 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13745 mips32_op
= OPC_FLOOR_L_S
;
13747 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13748 mips32_op
= OPC_FLOOR_L_D
;
13750 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13751 mips32_op
= OPC_FLOOR_W_S
;
13753 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13754 mips32_op
= OPC_FLOOR_W_D
;
13758 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13759 mips32_op
= OPC_CEIL_L_S
;
13761 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13762 mips32_op
= OPC_CEIL_L_D
;
13764 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13765 mips32_op
= OPC_CEIL_W_S
;
13767 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13768 mips32_op
= OPC_CEIL_W_D
;
13772 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13773 mips32_op
= OPC_TRUNC_L_S
;
13775 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13776 mips32_op
= OPC_TRUNC_L_D
;
13778 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13779 mips32_op
= OPC_TRUNC_W_S
;
13781 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13782 mips32_op
= OPC_TRUNC_W_D
;
13786 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13787 mips32_op
= OPC_ROUND_L_S
;
13789 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13790 mips32_op
= OPC_ROUND_L_D
;
13792 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13793 mips32_op
= OPC_ROUND_W_S
;
13795 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13796 mips32_op
= OPC_ROUND_W_D
;
13799 /* Integer to floating-point conversion */
13800 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13801 mips32_op
= OPC_CVT_L_S
;
13803 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13804 mips32_op
= OPC_CVT_L_D
;
13806 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13807 mips32_op
= OPC_CVT_W_S
;
13809 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13810 mips32_op
= OPC_CVT_W_D
;
13813 /* Paired-foo conversions */
13814 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13815 mips32_op
= OPC_CVT_S_PL
;
13817 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13818 mips32_op
= OPC_CVT_S_PU
;
13820 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13821 mips32_op
= OPC_CVT_PW_PS
;
13823 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13824 mips32_op
= OPC_CVT_PS_PW
;
13827 /* Floating-point moves */
13828 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13829 mips32_op
= OPC_MOV_S
;
13831 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13832 mips32_op
= OPC_MOV_D
;
13834 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13835 mips32_op
= OPC_MOV_PS
;
13838 /* Absolute value */
13839 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13840 mips32_op
= OPC_ABS_S
;
13842 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13843 mips32_op
= OPC_ABS_D
;
13845 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13846 mips32_op
= OPC_ABS_PS
;
13850 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13851 mips32_op
= OPC_NEG_S
;
13853 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13854 mips32_op
= OPC_NEG_D
;
13856 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13857 mips32_op
= OPC_NEG_PS
;
13860 /* Reciprocal square root step */
13861 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13862 mips32_op
= OPC_RSQRT1_S
;
13864 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13865 mips32_op
= OPC_RSQRT1_D
;
13867 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13868 mips32_op
= OPC_RSQRT1_PS
;
13871 /* Reciprocal step */
13872 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13873 mips32_op
= OPC_RECIP1_S
;
13875 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13876 mips32_op
= OPC_RECIP1_S
;
13878 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13879 mips32_op
= OPC_RECIP1_PS
;
13882 /* Conversions from double */
13883 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13884 mips32_op
= OPC_CVT_D_S
;
13886 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13887 mips32_op
= OPC_CVT_D_W
;
13889 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13890 mips32_op
= OPC_CVT_D_L
;
13893 /* Conversions from single */
13894 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13895 mips32_op
= OPC_CVT_S_D
;
13897 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13898 mips32_op
= OPC_CVT_S_W
;
13900 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13901 mips32_op
= OPC_CVT_S_L
;
13903 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13906 /* Conditional moves on floating-point codes */
13907 case COND_FLOAT_MOV(MOVT
, 0):
13908 case COND_FLOAT_MOV(MOVT
, 1):
13909 case COND_FLOAT_MOV(MOVT
, 2):
13910 case COND_FLOAT_MOV(MOVT
, 3):
13911 case COND_FLOAT_MOV(MOVT
, 4):
13912 case COND_FLOAT_MOV(MOVT
, 5):
13913 case COND_FLOAT_MOV(MOVT
, 6):
13914 case COND_FLOAT_MOV(MOVT
, 7):
13915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13916 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13918 case COND_FLOAT_MOV(MOVF
, 0):
13919 case COND_FLOAT_MOV(MOVF
, 1):
13920 case COND_FLOAT_MOV(MOVF
, 2):
13921 case COND_FLOAT_MOV(MOVF
, 3):
13922 case COND_FLOAT_MOV(MOVF
, 4):
13923 case COND_FLOAT_MOV(MOVF
, 5):
13924 case COND_FLOAT_MOV(MOVF
, 6):
13925 case COND_FLOAT_MOV(MOVF
, 7):
13926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13927 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13930 MIPS_INVAL("pool32fxf");
13931 generate_exception_end(ctx
, EXCP_RI
);
13936 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13940 int rt
, rs
, rd
, rr
;
13942 uint32_t op
, minor
, minor2
, mips32_op
;
13943 uint32_t cond
, fmt
, cc
;
13945 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13946 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13948 rt
= (ctx
->opcode
>> 21) & 0x1f;
13949 rs
= (ctx
->opcode
>> 16) & 0x1f;
13950 rd
= (ctx
->opcode
>> 11) & 0x1f;
13951 rr
= (ctx
->opcode
>> 6) & 0x1f;
13952 imm
= (int16_t) ctx
->opcode
;
13954 op
= (ctx
->opcode
>> 26) & 0x3f;
13957 minor
= ctx
->opcode
& 0x3f;
13960 minor
= (ctx
->opcode
>> 6) & 0xf;
13963 mips32_op
= OPC_SLL
;
13966 mips32_op
= OPC_SRA
;
13969 mips32_op
= OPC_SRL
;
13972 mips32_op
= OPC_ROTR
;
13974 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13977 check_insn(ctx
, ISA_MIPS32R6
);
13978 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13981 check_insn(ctx
, ISA_MIPS32R6
);
13982 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13985 check_insn(ctx
, ISA_MIPS32R6
);
13986 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13989 goto pool32a_invalid
;
13993 minor
= (ctx
->opcode
>> 6) & 0xf;
13997 mips32_op
= OPC_ADD
;
14000 mips32_op
= OPC_ADDU
;
14003 mips32_op
= OPC_SUB
;
14006 mips32_op
= OPC_SUBU
;
14009 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14010 mips32_op
= OPC_MUL
;
14012 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14016 mips32_op
= OPC_SLLV
;
14019 mips32_op
= OPC_SRLV
;
14022 mips32_op
= OPC_SRAV
;
14025 mips32_op
= OPC_ROTRV
;
14027 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14029 /* Logical operations */
14031 mips32_op
= OPC_AND
;
14034 mips32_op
= OPC_OR
;
14037 mips32_op
= OPC_NOR
;
14040 mips32_op
= OPC_XOR
;
14042 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14044 /* Set less than */
14046 mips32_op
= OPC_SLT
;
14049 mips32_op
= OPC_SLTU
;
14051 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14054 goto pool32a_invalid
;
14058 minor
= (ctx
->opcode
>> 6) & 0xf;
14060 /* Conditional moves */
14061 case MOVN
: /* MUL */
14062 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14064 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14067 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14070 case MOVZ
: /* MUH */
14071 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14073 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14076 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14080 check_insn(ctx
, ISA_MIPS32R6
);
14081 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14084 check_insn(ctx
, ISA_MIPS32R6
);
14085 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14087 case LWXS
: /* DIV */
14088 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14090 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14093 gen_ldxs(ctx
, rs
, rt
, rd
);
14097 check_insn(ctx
, ISA_MIPS32R6
);
14098 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14101 check_insn(ctx
, ISA_MIPS32R6
);
14102 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14105 check_insn(ctx
, ISA_MIPS32R6
);
14106 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14109 goto pool32a_invalid
;
14113 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14116 check_insn(ctx
, ISA_MIPS32R6
);
14117 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14118 extract32(ctx
->opcode
, 9, 2));
14121 check_insn(ctx
, ISA_MIPS32R6
);
14122 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14123 extract32(ctx
->opcode
, 9, 2));
14126 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14129 gen_pool32axf(env
, ctx
, rt
, rs
);
14132 generate_exception_end(ctx
, EXCP_BREAK
);
14135 check_insn(ctx
, ISA_MIPS32R6
);
14136 generate_exception_end(ctx
, EXCP_RI
);
14140 MIPS_INVAL("pool32a");
14141 generate_exception_end(ctx
, EXCP_RI
);
14146 minor
= (ctx
->opcode
>> 12) & 0xf;
14149 check_cp0_enabled(ctx
);
14150 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14151 gen_cache_operation(ctx
, rt
, rs
, imm
);
14156 /* COP2: Not implemented. */
14157 generate_exception_err(ctx
, EXCP_CpU
, 2);
14159 #ifdef TARGET_MIPS64
14162 check_insn(ctx
, ISA_MIPS3
);
14163 check_mips_64(ctx
);
14168 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14170 #ifdef TARGET_MIPS64
14173 check_insn(ctx
, ISA_MIPS3
);
14174 check_mips_64(ctx
);
14179 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14182 MIPS_INVAL("pool32b");
14183 generate_exception_end(ctx
, EXCP_RI
);
14188 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14189 minor
= ctx
->opcode
& 0x3f;
14190 check_cp1_enabled(ctx
);
14193 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14194 mips32_op
= OPC_ALNV_PS
;
14197 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14198 mips32_op
= OPC_MADD_S
;
14201 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14202 mips32_op
= OPC_MADD_D
;
14205 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14206 mips32_op
= OPC_MADD_PS
;
14209 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14210 mips32_op
= OPC_MSUB_S
;
14213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14214 mips32_op
= OPC_MSUB_D
;
14217 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14218 mips32_op
= OPC_MSUB_PS
;
14221 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14222 mips32_op
= OPC_NMADD_S
;
14225 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14226 mips32_op
= OPC_NMADD_D
;
14229 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14230 mips32_op
= OPC_NMADD_PS
;
14233 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14234 mips32_op
= OPC_NMSUB_S
;
14237 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14238 mips32_op
= OPC_NMSUB_D
;
14241 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14242 mips32_op
= OPC_NMSUB_PS
;
14244 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14246 case CABS_COND_FMT
:
14247 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14248 cond
= (ctx
->opcode
>> 6) & 0xf;
14249 cc
= (ctx
->opcode
>> 13) & 0x7;
14250 fmt
= (ctx
->opcode
>> 10) & 0x3;
14253 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14256 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14259 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14262 goto pool32f_invalid
;
14266 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14267 cond
= (ctx
->opcode
>> 6) & 0xf;
14268 cc
= (ctx
->opcode
>> 13) & 0x7;
14269 fmt
= (ctx
->opcode
>> 10) & 0x3;
14272 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14275 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14278 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14281 goto pool32f_invalid
;
14285 check_insn(ctx
, ISA_MIPS32R6
);
14286 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14289 check_insn(ctx
, ISA_MIPS32R6
);
14290 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14293 gen_pool32fxf(ctx
, rt
, rs
);
14297 switch ((ctx
->opcode
>> 6) & 0x7) {
14299 mips32_op
= OPC_PLL_PS
;
14302 mips32_op
= OPC_PLU_PS
;
14305 mips32_op
= OPC_PUL_PS
;
14308 mips32_op
= OPC_PUU_PS
;
14311 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14312 mips32_op
= OPC_CVT_PS_S
;
14314 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14317 goto pool32f_invalid
;
14321 check_insn(ctx
, ISA_MIPS32R6
);
14322 switch ((ctx
->opcode
>> 9) & 0x3) {
14324 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14327 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14330 goto pool32f_invalid
;
14335 switch ((ctx
->opcode
>> 6) & 0x7) {
14337 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14338 mips32_op
= OPC_LWXC1
;
14341 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14342 mips32_op
= OPC_SWXC1
;
14345 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14346 mips32_op
= OPC_LDXC1
;
14349 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14350 mips32_op
= OPC_SDXC1
;
14353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14354 mips32_op
= OPC_LUXC1
;
14357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14358 mips32_op
= OPC_SUXC1
;
14360 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14363 goto pool32f_invalid
;
14367 check_insn(ctx
, ISA_MIPS32R6
);
14368 switch ((ctx
->opcode
>> 9) & 0x3) {
14370 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14373 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14376 goto pool32f_invalid
;
14381 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14382 fmt
= (ctx
->opcode
>> 9) & 0x3;
14383 switch ((ctx
->opcode
>> 6) & 0x7) {
14387 mips32_op
= OPC_RSQRT2_S
;
14390 mips32_op
= OPC_RSQRT2_D
;
14393 mips32_op
= OPC_RSQRT2_PS
;
14396 goto pool32f_invalid
;
14402 mips32_op
= OPC_RECIP2_S
;
14405 mips32_op
= OPC_RECIP2_D
;
14408 mips32_op
= OPC_RECIP2_PS
;
14411 goto pool32f_invalid
;
14415 mips32_op
= OPC_ADDR_PS
;
14418 mips32_op
= OPC_MULR_PS
;
14420 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14423 goto pool32f_invalid
;
14427 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14428 cc
= (ctx
->opcode
>> 13) & 0x7;
14429 fmt
= (ctx
->opcode
>> 9) & 0x3;
14430 switch ((ctx
->opcode
>> 6) & 0x7) {
14431 case MOVF_FMT
: /* RINT_FMT */
14432 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14436 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14439 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14442 goto pool32f_invalid
;
14448 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14451 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14455 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14458 goto pool32f_invalid
;
14462 case MOVT_FMT
: /* CLASS_FMT */
14463 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14467 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14470 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14473 goto pool32f_invalid
;
14479 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14482 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14486 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14489 goto pool32f_invalid
;
14494 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14497 goto pool32f_invalid
;
14500 #define FINSN_3ARG_SDPS(prfx) \
14501 switch ((ctx->opcode >> 8) & 0x3) { \
14503 mips32_op = OPC_##prfx##_S; \
14506 mips32_op = OPC_##prfx##_D; \
14508 case FMT_SDPS_PS: \
14510 mips32_op = OPC_##prfx##_PS; \
14513 goto pool32f_invalid; \
14516 check_insn(ctx
, ISA_MIPS32R6
);
14517 switch ((ctx
->opcode
>> 9) & 0x3) {
14519 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14522 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14525 goto pool32f_invalid
;
14529 check_insn(ctx
, ISA_MIPS32R6
);
14530 switch ((ctx
->opcode
>> 9) & 0x3) {
14532 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14535 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14538 goto pool32f_invalid
;
14542 /* regular FP ops */
14543 switch ((ctx
->opcode
>> 6) & 0x3) {
14545 FINSN_3ARG_SDPS(ADD
);
14548 FINSN_3ARG_SDPS(SUB
);
14551 FINSN_3ARG_SDPS(MUL
);
14554 fmt
= (ctx
->opcode
>> 8) & 0x3;
14556 mips32_op
= OPC_DIV_D
;
14557 } else if (fmt
== 0) {
14558 mips32_op
= OPC_DIV_S
;
14560 goto pool32f_invalid
;
14564 goto pool32f_invalid
;
14569 switch ((ctx
->opcode
>> 6) & 0x7) {
14570 case MOVN_FMT
: /* SELNEZ_FMT */
14571 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14573 switch ((ctx
->opcode
>> 9) & 0x3) {
14575 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14578 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14581 goto pool32f_invalid
;
14585 FINSN_3ARG_SDPS(MOVN
);
14589 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14590 FINSN_3ARG_SDPS(MOVN
);
14592 case MOVZ_FMT
: /* SELEQZ_FMT */
14593 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14595 switch ((ctx
->opcode
>> 9) & 0x3) {
14597 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14600 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14603 goto pool32f_invalid
;
14607 FINSN_3ARG_SDPS(MOVZ
);
14611 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14612 FINSN_3ARG_SDPS(MOVZ
);
14615 check_insn(ctx
, ISA_MIPS32R6
);
14616 switch ((ctx
->opcode
>> 9) & 0x3) {
14618 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14621 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14624 goto pool32f_invalid
;
14628 check_insn(ctx
, ISA_MIPS32R6
);
14629 switch ((ctx
->opcode
>> 9) & 0x3) {
14631 mips32_op
= OPC_MADDF_S
;
14634 mips32_op
= OPC_MADDF_D
;
14637 goto pool32f_invalid
;
14641 check_insn(ctx
, ISA_MIPS32R6
);
14642 switch ((ctx
->opcode
>> 9) & 0x3) {
14644 mips32_op
= OPC_MSUBF_S
;
14647 mips32_op
= OPC_MSUBF_D
;
14650 goto pool32f_invalid
;
14654 goto pool32f_invalid
;
14658 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14662 MIPS_INVAL("pool32f");
14663 generate_exception_end(ctx
, EXCP_RI
);
14667 generate_exception_err(ctx
, EXCP_CpU
, 1);
14671 minor
= (ctx
->opcode
>> 21) & 0x1f;
14674 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14675 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14678 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14679 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14680 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14684 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14685 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14688 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14689 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14692 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14693 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14694 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14697 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14698 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14699 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14702 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14703 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14706 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14707 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14711 case TLTI
: /* BC1EQZC */
14712 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14714 check_cp1_enabled(ctx
);
14715 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14718 mips32_op
= OPC_TLTI
;
14722 case TGEI
: /* BC1NEZC */
14723 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14725 check_cp1_enabled(ctx
);
14726 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14729 mips32_op
= OPC_TGEI
;
14734 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14735 mips32_op
= OPC_TLTIU
;
14738 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14739 mips32_op
= OPC_TGEIU
;
14741 case TNEI
: /* SYNCI */
14742 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14744 /* Break the TB to be able to sync copied instructions
14746 ctx
->base
.is_jmp
= DISAS_STOP
;
14749 mips32_op
= OPC_TNEI
;
14754 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14755 mips32_op
= OPC_TEQI
;
14757 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14762 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14763 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14764 4, rs
, 0, imm
<< 1, 0);
14765 /* Compact branches don't have a delay slot, so just let
14766 the normal delay slot handling take us to the branch
14770 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14771 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14774 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14775 /* Break the TB to be able to sync copied instructions
14777 ctx
->base
.is_jmp
= DISAS_STOP
;
14781 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14782 /* COP2: Not implemented. */
14783 generate_exception_err(ctx
, EXCP_CpU
, 2);
14786 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14787 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14790 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14791 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14794 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14795 mips32_op
= OPC_BC1FANY4
;
14798 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14799 mips32_op
= OPC_BC1TANY4
;
14802 check_insn(ctx
, ASE_MIPS3D
);
14805 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14806 check_cp1_enabled(ctx
);
14807 gen_compute_branch1(ctx
, mips32_op
,
14808 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14810 generate_exception_err(ctx
, EXCP_CpU
, 1);
14815 /* MIPS DSP: not implemented */
14818 MIPS_INVAL("pool32i");
14819 generate_exception_end(ctx
, EXCP_RI
);
14824 minor
= (ctx
->opcode
>> 12) & 0xf;
14825 offset
= sextract32(ctx
->opcode
, 0,
14826 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14830 mips32_op
= OPC_LWL
;
14833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14834 mips32_op
= OPC_SWL
;
14837 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14838 mips32_op
= OPC_LWR
;
14841 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14842 mips32_op
= OPC_SWR
;
14844 #if defined(TARGET_MIPS64)
14846 check_insn(ctx
, ISA_MIPS3
);
14847 check_mips_64(ctx
);
14848 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14849 mips32_op
= OPC_LDL
;
14852 check_insn(ctx
, ISA_MIPS3
);
14853 check_mips_64(ctx
);
14854 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14855 mips32_op
= OPC_SDL
;
14858 check_insn(ctx
, ISA_MIPS3
);
14859 check_mips_64(ctx
);
14860 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14861 mips32_op
= OPC_LDR
;
14864 check_insn(ctx
, ISA_MIPS3
);
14865 check_mips_64(ctx
);
14866 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14867 mips32_op
= OPC_SDR
;
14870 check_insn(ctx
, ISA_MIPS3
);
14871 check_mips_64(ctx
);
14872 mips32_op
= OPC_LWU
;
14875 check_insn(ctx
, ISA_MIPS3
);
14876 check_mips_64(ctx
);
14877 mips32_op
= OPC_LLD
;
14881 mips32_op
= OPC_LL
;
14884 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14887 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
14890 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14892 #if defined(TARGET_MIPS64)
14894 check_insn(ctx
, ISA_MIPS3
);
14895 check_mips_64(ctx
);
14896 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14901 MIPS_INVAL("pool32c ld-eva");
14902 generate_exception_end(ctx
, EXCP_RI
);
14905 check_cp0_enabled(ctx
);
14907 minor2
= (ctx
->opcode
>> 9) & 0x7;
14908 offset
= sextract32(ctx
->opcode
, 0, 9);
14911 mips32_op
= OPC_LBUE
;
14914 mips32_op
= OPC_LHUE
;
14917 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14918 mips32_op
= OPC_LWLE
;
14921 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14922 mips32_op
= OPC_LWRE
;
14925 mips32_op
= OPC_LBE
;
14928 mips32_op
= OPC_LHE
;
14931 mips32_op
= OPC_LLE
;
14934 mips32_op
= OPC_LWE
;
14940 MIPS_INVAL("pool32c st-eva");
14941 generate_exception_end(ctx
, EXCP_RI
);
14944 check_cp0_enabled(ctx
);
14946 minor2
= (ctx
->opcode
>> 9) & 0x7;
14947 offset
= sextract32(ctx
->opcode
, 0, 9);
14950 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14951 mips32_op
= OPC_SWLE
;
14954 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14955 mips32_op
= OPC_SWRE
;
14958 /* Treat as no-op */
14959 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14960 /* hint codes 24-31 are reserved and signal RI */
14961 generate_exception(ctx
, EXCP_RI
);
14965 /* Treat as no-op */
14966 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14967 gen_cache_operation(ctx
, rt
, rs
, offset
);
14971 mips32_op
= OPC_SBE
;
14974 mips32_op
= OPC_SHE
;
14977 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
14980 mips32_op
= OPC_SWE
;
14985 /* Treat as no-op */
14986 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14987 /* hint codes 24-31 are reserved and signal RI */
14988 generate_exception(ctx
, EXCP_RI
);
14992 MIPS_INVAL("pool32c");
14993 generate_exception_end(ctx
, EXCP_RI
);
14997 case ADDI32
: /* AUI, LUI */
14998 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15000 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15003 mips32_op
= OPC_ADDI
;
15008 mips32_op
= OPC_ADDIU
;
15010 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15013 /* Logical operations */
15015 mips32_op
= OPC_ORI
;
15018 mips32_op
= OPC_XORI
;
15021 mips32_op
= OPC_ANDI
;
15023 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15026 /* Set less than immediate */
15028 mips32_op
= OPC_SLTI
;
15031 mips32_op
= OPC_SLTIU
;
15033 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15036 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15037 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15038 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15039 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15041 case JALS32
: /* BOVC, BEQC, BEQZALC */
15042 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15045 mips32_op
= OPC_BOVC
;
15046 } else if (rs
< rt
&& rs
== 0) {
15048 mips32_op
= OPC_BEQZALC
;
15051 mips32_op
= OPC_BEQC
;
15053 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15056 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15057 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15058 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15061 case BEQ32
: /* BC */
15062 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15064 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15065 sextract32(ctx
->opcode
<< 1, 0, 27));
15068 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15071 case BNE32
: /* BALC */
15072 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15074 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15075 sextract32(ctx
->opcode
<< 1, 0, 27));
15078 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15081 case J32
: /* BGTZC, BLTZC, BLTC */
15082 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15083 if (rs
== 0 && rt
!= 0) {
15085 mips32_op
= OPC_BGTZC
;
15086 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15088 mips32_op
= OPC_BLTZC
;
15091 mips32_op
= OPC_BLTC
;
15093 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15096 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15097 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15100 case JAL32
: /* BLEZC, BGEZC, BGEC */
15101 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15102 if (rs
== 0 && rt
!= 0) {
15104 mips32_op
= OPC_BLEZC
;
15105 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15107 mips32_op
= OPC_BGEZC
;
15110 mips32_op
= OPC_BGEC
;
15112 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15115 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15116 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15117 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15120 /* Floating point (COP1) */
15122 mips32_op
= OPC_LWC1
;
15125 mips32_op
= OPC_LDC1
;
15128 mips32_op
= OPC_SWC1
;
15131 mips32_op
= OPC_SDC1
;
15133 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15135 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15136 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15137 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15138 switch ((ctx
->opcode
>> 16) & 0x1f) {
15139 case ADDIUPC_00
... ADDIUPC_07
:
15140 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15143 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
15146 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
15148 case LWPC_08
... LWPC_0F
:
15149 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15152 generate_exception(ctx
, EXCP_RI
);
15157 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15158 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15160 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15163 case BNVC
: /* BNEC, BNEZALC */
15164 check_insn(ctx
, ISA_MIPS32R6
);
15167 mips32_op
= OPC_BNVC
;
15168 } else if (rs
< rt
&& rs
== 0) {
15170 mips32_op
= OPC_BNEZALC
;
15173 mips32_op
= OPC_BNEC
;
15175 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15177 case R6_BNEZC
: /* JIALC */
15178 check_insn(ctx
, ISA_MIPS32R6
);
15181 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15182 sextract32(ctx
->opcode
<< 1, 0, 22));
15185 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15188 case R6_BEQZC
: /* JIC */
15189 check_insn(ctx
, ISA_MIPS32R6
);
15192 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15193 sextract32(ctx
->opcode
<< 1, 0, 22));
15196 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15199 case BLEZALC
: /* BGEZALC, BGEUC */
15200 check_insn(ctx
, ISA_MIPS32R6
);
15201 if (rs
== 0 && rt
!= 0) {
15203 mips32_op
= OPC_BLEZALC
;
15204 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15206 mips32_op
= OPC_BGEZALC
;
15209 mips32_op
= OPC_BGEUC
;
15211 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15213 case BGTZALC
: /* BLTZALC, BLTUC */
15214 check_insn(ctx
, ISA_MIPS32R6
);
15215 if (rs
== 0 && rt
!= 0) {
15217 mips32_op
= OPC_BGTZALC
;
15218 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15220 mips32_op
= OPC_BLTZALC
;
15223 mips32_op
= OPC_BLTUC
;
15225 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15227 /* Loads and stores */
15229 mips32_op
= OPC_LB
;
15232 mips32_op
= OPC_LBU
;
15235 mips32_op
= OPC_LH
;
15238 mips32_op
= OPC_LHU
;
15241 mips32_op
= OPC_LW
;
15243 #ifdef TARGET_MIPS64
15245 check_insn(ctx
, ISA_MIPS3
);
15246 check_mips_64(ctx
);
15247 mips32_op
= OPC_LD
;
15250 check_insn(ctx
, ISA_MIPS3
);
15251 check_mips_64(ctx
);
15252 mips32_op
= OPC_SD
;
15256 mips32_op
= OPC_SB
;
15259 mips32_op
= OPC_SH
;
15262 mips32_op
= OPC_SW
;
15265 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15268 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15271 generate_exception_end(ctx
, EXCP_RI
);
15276 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15280 /* make sure instructions are on a halfword boundary */
15281 if (ctx
->base
.pc_next
& 0x1) {
15282 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15283 generate_exception_end(ctx
, EXCP_AdEL
);
15287 op
= (ctx
->opcode
>> 10) & 0x3f;
15288 /* Enforce properly-sized instructions in a delay slot */
15289 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15290 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15292 /* POOL32A, POOL32B, POOL32I, POOL32C */
15294 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15296 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15298 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15300 /* LB32, LH32, LWC132, LDC132, LW32 */
15301 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15302 generate_exception_end(ctx
, EXCP_RI
);
15307 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15309 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15311 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15312 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15313 generate_exception_end(ctx
, EXCP_RI
);
15323 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15324 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15325 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15328 switch (ctx
->opcode
& 0x1) {
15336 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15337 /* In the Release 6 the register number location in
15338 * the instruction encoding has changed.
15340 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15342 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15348 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15349 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15350 int amount
= (ctx
->opcode
>> 1) & 0x7;
15352 amount
= amount
== 0 ? 8 : amount
;
15354 switch (ctx
->opcode
& 0x1) {
15363 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15367 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15368 gen_pool16c_r6_insn(ctx
);
15370 gen_pool16c_insn(ctx
);
15375 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15376 int rb
= 28; /* GP */
15377 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15379 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15383 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15384 if (ctx
->opcode
& 1) {
15385 generate_exception_end(ctx
, EXCP_RI
);
15388 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15389 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15390 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15391 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15396 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15397 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15398 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15399 offset
= (offset
== 0xf ? -1 : offset
);
15401 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15406 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15407 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15408 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15410 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15415 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15416 int rb
= 29; /* SP */
15417 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15419 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15424 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15425 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15426 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15428 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15433 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15434 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15435 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15437 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15442 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15443 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15444 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15446 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15451 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15452 int rb
= 29; /* SP */
15453 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15455 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15460 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15461 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15462 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15464 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15469 int rd
= uMIPS_RD5(ctx
->opcode
);
15470 int rs
= uMIPS_RS5(ctx
->opcode
);
15472 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15479 switch (ctx
->opcode
& 0x1) {
15489 switch (ctx
->opcode
& 0x1) {
15494 gen_addiur1sp(ctx
);
15498 case B16
: /* BC16 */
15499 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15500 sextract32(ctx
->opcode
, 0, 10) << 1,
15501 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15503 case BNEZ16
: /* BNEZC16 */
15504 case BEQZ16
: /* BEQZC16 */
15505 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15506 mmreg(uMIPS_RD(ctx
->opcode
)),
15507 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15508 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15513 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15514 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15516 imm
= (imm
== 0x7f ? -1 : imm
);
15517 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15523 generate_exception_end(ctx
, EXCP_RI
);
15526 decode_micromips32_opc(env
, ctx
);
15533 /* SmartMIPS extension to MIPS32 */
15535 #if defined(TARGET_MIPS64)
15537 /* MDMX extension to MIPS64 */
15541 /* MIPSDSP functions. */
15542 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15543 int rd
, int base
, int offset
)
15548 t0
= tcg_temp_new();
15551 gen_load_gpr(t0
, offset
);
15552 } else if (offset
== 0) {
15553 gen_load_gpr(t0
, base
);
15555 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15560 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15561 gen_store_gpr(t0
, rd
);
15564 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15565 gen_store_gpr(t0
, rd
);
15568 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15569 gen_store_gpr(t0
, rd
);
15571 #if defined(TARGET_MIPS64)
15573 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15574 gen_store_gpr(t0
, rd
);
15581 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15582 int ret
, int v1
, int v2
)
15588 /* Treat as NOP. */
15592 v1_t
= tcg_temp_new();
15593 v2_t
= tcg_temp_new();
15595 gen_load_gpr(v1_t
, v1
);
15596 gen_load_gpr(v2_t
, v2
);
15599 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15600 case OPC_MULT_G_2E
:
15604 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15606 case OPC_ADDUH_R_QB
:
15607 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15610 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15612 case OPC_ADDQH_R_PH
:
15613 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15616 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15618 case OPC_ADDQH_R_W
:
15619 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15622 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15624 case OPC_SUBUH_R_QB
:
15625 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15628 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15630 case OPC_SUBQH_R_PH
:
15631 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15634 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15636 case OPC_SUBQH_R_W
:
15637 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15641 case OPC_ABSQ_S_PH_DSP
:
15643 case OPC_ABSQ_S_QB
:
15645 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15647 case OPC_ABSQ_S_PH
:
15649 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15653 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15655 case OPC_PRECEQ_W_PHL
:
15657 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15658 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15660 case OPC_PRECEQ_W_PHR
:
15662 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15663 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15664 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15666 case OPC_PRECEQU_PH_QBL
:
15668 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15670 case OPC_PRECEQU_PH_QBR
:
15672 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15674 case OPC_PRECEQU_PH_QBLA
:
15676 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15678 case OPC_PRECEQU_PH_QBRA
:
15680 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15682 case OPC_PRECEU_PH_QBL
:
15684 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15686 case OPC_PRECEU_PH_QBR
:
15688 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15690 case OPC_PRECEU_PH_QBLA
:
15692 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15694 case OPC_PRECEU_PH_QBRA
:
15696 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15700 case OPC_ADDU_QB_DSP
:
15704 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15706 case OPC_ADDQ_S_PH
:
15708 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15712 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15716 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15718 case OPC_ADDU_S_QB
:
15720 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15724 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15726 case OPC_ADDU_S_PH
:
15728 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15732 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15734 case OPC_SUBQ_S_PH
:
15736 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15740 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15744 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15746 case OPC_SUBU_S_QB
:
15748 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15752 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15754 case OPC_SUBU_S_PH
:
15756 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15760 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15764 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15768 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15770 case OPC_RADDU_W_QB
:
15772 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15776 case OPC_CMPU_EQ_QB_DSP
:
15778 case OPC_PRECR_QB_PH
:
15780 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15782 case OPC_PRECRQ_QB_PH
:
15784 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15786 case OPC_PRECR_SRA_PH_W
:
15789 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15790 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15792 tcg_temp_free_i32(sa_t
);
15795 case OPC_PRECR_SRA_R_PH_W
:
15798 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15799 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15801 tcg_temp_free_i32(sa_t
);
15804 case OPC_PRECRQ_PH_W
:
15806 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15808 case OPC_PRECRQ_RS_PH_W
:
15810 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15812 case OPC_PRECRQU_S_QB_PH
:
15814 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15818 #ifdef TARGET_MIPS64
15819 case OPC_ABSQ_S_QH_DSP
:
15821 case OPC_PRECEQ_L_PWL
:
15823 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15825 case OPC_PRECEQ_L_PWR
:
15827 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15829 case OPC_PRECEQ_PW_QHL
:
15831 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15833 case OPC_PRECEQ_PW_QHR
:
15835 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15837 case OPC_PRECEQ_PW_QHLA
:
15839 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15841 case OPC_PRECEQ_PW_QHRA
:
15843 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15845 case OPC_PRECEQU_QH_OBL
:
15847 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15849 case OPC_PRECEQU_QH_OBR
:
15851 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15853 case OPC_PRECEQU_QH_OBLA
:
15855 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15857 case OPC_PRECEQU_QH_OBRA
:
15859 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15861 case OPC_PRECEU_QH_OBL
:
15863 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15865 case OPC_PRECEU_QH_OBR
:
15867 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15869 case OPC_PRECEU_QH_OBLA
:
15871 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15873 case OPC_PRECEU_QH_OBRA
:
15875 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15877 case OPC_ABSQ_S_OB
:
15879 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15881 case OPC_ABSQ_S_PW
:
15883 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15885 case OPC_ABSQ_S_QH
:
15887 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15891 case OPC_ADDU_OB_DSP
:
15893 case OPC_RADDU_L_OB
:
15895 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15899 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15901 case OPC_SUBQ_S_PW
:
15903 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15907 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15909 case OPC_SUBQ_S_QH
:
15911 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15915 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15917 case OPC_SUBU_S_OB
:
15919 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15923 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15925 case OPC_SUBU_S_QH
:
15927 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15931 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15933 case OPC_SUBUH_R_OB
:
15935 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15939 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15941 case OPC_ADDQ_S_PW
:
15943 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15947 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15949 case OPC_ADDQ_S_QH
:
15951 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15955 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15957 case OPC_ADDU_S_OB
:
15959 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15963 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15965 case OPC_ADDU_S_QH
:
15967 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15971 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15973 case OPC_ADDUH_R_OB
:
15975 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15979 case OPC_CMPU_EQ_OB_DSP
:
15981 case OPC_PRECR_OB_QH
:
15983 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15985 case OPC_PRECR_SRA_QH_PW
:
15988 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15989 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15990 tcg_temp_free_i32(ret_t
);
15993 case OPC_PRECR_SRA_R_QH_PW
:
15996 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15997 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15998 tcg_temp_free_i32(sa_v
);
16001 case OPC_PRECRQ_OB_QH
:
16003 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16005 case OPC_PRECRQ_PW_L
:
16007 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
16009 case OPC_PRECRQ_QH_PW
:
16011 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16013 case OPC_PRECRQ_RS_QH_PW
:
16015 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16017 case OPC_PRECRQU_S_OB_QH
:
16019 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16026 tcg_temp_free(v1_t
);
16027 tcg_temp_free(v2_t
);
16030 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
16031 int ret
, int v1
, int v2
)
16039 /* Treat as NOP. */
16043 t0
= tcg_temp_new();
16044 v1_t
= tcg_temp_new();
16045 v2_t
= tcg_temp_new();
16047 tcg_gen_movi_tl(t0
, v1
);
16048 gen_load_gpr(v1_t
, v1
);
16049 gen_load_gpr(v2_t
, v2
);
16052 case OPC_SHLL_QB_DSP
:
16054 op2
= MASK_SHLL_QB(ctx
->opcode
);
16058 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16062 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16066 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16070 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16072 case OPC_SHLL_S_PH
:
16074 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16076 case OPC_SHLLV_S_PH
:
16078 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16082 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16084 case OPC_SHLLV_S_W
:
16086 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16090 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16094 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16098 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16102 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16106 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16108 case OPC_SHRA_R_QB
:
16110 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16114 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16116 case OPC_SHRAV_R_QB
:
16118 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16122 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16124 case OPC_SHRA_R_PH
:
16126 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16130 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16132 case OPC_SHRAV_R_PH
:
16134 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16138 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16140 case OPC_SHRAV_R_W
:
16142 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16144 default: /* Invalid */
16145 MIPS_INVAL("MASK SHLL.QB");
16146 generate_exception_end(ctx
, EXCP_RI
);
16151 #ifdef TARGET_MIPS64
16152 case OPC_SHLL_OB_DSP
:
16153 op2
= MASK_SHLL_OB(ctx
->opcode
);
16157 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16161 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16163 case OPC_SHLL_S_PW
:
16165 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16167 case OPC_SHLLV_S_PW
:
16169 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16173 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16177 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16181 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16185 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16187 case OPC_SHLL_S_QH
:
16189 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16191 case OPC_SHLLV_S_QH
:
16193 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16197 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16201 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16203 case OPC_SHRA_R_OB
:
16205 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16207 case OPC_SHRAV_R_OB
:
16209 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16213 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16217 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16219 case OPC_SHRA_R_PW
:
16221 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16223 case OPC_SHRAV_R_PW
:
16225 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16229 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16233 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16235 case OPC_SHRA_R_QH
:
16237 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16239 case OPC_SHRAV_R_QH
:
16241 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16245 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16249 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16253 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16257 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16259 default: /* Invalid */
16260 MIPS_INVAL("MASK SHLL.OB");
16261 generate_exception_end(ctx
, EXCP_RI
);
16269 tcg_temp_free(v1_t
);
16270 tcg_temp_free(v2_t
);
16273 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16274 int ret
, int v1
, int v2
, int check_ret
)
16280 if ((ret
== 0) && (check_ret
== 1)) {
16281 /* Treat as NOP. */
16285 t0
= tcg_temp_new_i32();
16286 v1_t
= tcg_temp_new();
16287 v2_t
= tcg_temp_new();
16289 tcg_gen_movi_i32(t0
, ret
);
16290 gen_load_gpr(v1_t
, v1
);
16291 gen_load_gpr(v2_t
, v2
);
16294 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16295 * the same mask and op1. */
16296 case OPC_MULT_G_2E
:
16300 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16303 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16306 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16308 case OPC_MULQ_RS_W
:
16309 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16313 case OPC_DPA_W_PH_DSP
:
16315 case OPC_DPAU_H_QBL
:
16317 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16319 case OPC_DPAU_H_QBR
:
16321 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16323 case OPC_DPSU_H_QBL
:
16325 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16327 case OPC_DPSU_H_QBR
:
16329 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16333 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16335 case OPC_DPAX_W_PH
:
16337 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16339 case OPC_DPAQ_S_W_PH
:
16341 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16343 case OPC_DPAQX_S_W_PH
:
16345 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16347 case OPC_DPAQX_SA_W_PH
:
16349 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16353 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16355 case OPC_DPSX_W_PH
:
16357 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16359 case OPC_DPSQ_S_W_PH
:
16361 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16363 case OPC_DPSQX_S_W_PH
:
16365 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16367 case OPC_DPSQX_SA_W_PH
:
16369 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16371 case OPC_MULSAQ_S_W_PH
:
16373 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16375 case OPC_DPAQ_SA_L_W
:
16377 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16379 case OPC_DPSQ_SA_L_W
:
16381 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16383 case OPC_MAQ_S_W_PHL
:
16385 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16387 case OPC_MAQ_S_W_PHR
:
16389 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16391 case OPC_MAQ_SA_W_PHL
:
16393 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16395 case OPC_MAQ_SA_W_PHR
:
16397 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16399 case OPC_MULSA_W_PH
:
16401 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16405 #ifdef TARGET_MIPS64
16406 case OPC_DPAQ_W_QH_DSP
:
16408 int ac
= ret
& 0x03;
16409 tcg_gen_movi_i32(t0
, ac
);
16414 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16418 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16422 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16426 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16430 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16432 case OPC_DPAQ_S_W_QH
:
16434 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16436 case OPC_DPAQ_SA_L_PW
:
16438 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16440 case OPC_DPAU_H_OBL
:
16442 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16444 case OPC_DPAU_H_OBR
:
16446 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16450 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16452 case OPC_DPSQ_S_W_QH
:
16454 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16456 case OPC_DPSQ_SA_L_PW
:
16458 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16460 case OPC_DPSU_H_OBL
:
16462 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16464 case OPC_DPSU_H_OBR
:
16466 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16468 case OPC_MAQ_S_L_PWL
:
16470 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16472 case OPC_MAQ_S_L_PWR
:
16474 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16476 case OPC_MAQ_S_W_QHLL
:
16478 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16480 case OPC_MAQ_SA_W_QHLL
:
16482 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16484 case OPC_MAQ_S_W_QHLR
:
16486 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16488 case OPC_MAQ_SA_W_QHLR
:
16490 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16492 case OPC_MAQ_S_W_QHRL
:
16494 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16496 case OPC_MAQ_SA_W_QHRL
:
16498 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16500 case OPC_MAQ_S_W_QHRR
:
16502 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16504 case OPC_MAQ_SA_W_QHRR
:
16506 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16508 case OPC_MULSAQ_S_L_PW
:
16510 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16512 case OPC_MULSAQ_S_W_QH
:
16514 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16520 case OPC_ADDU_QB_DSP
:
16522 case OPC_MULEU_S_PH_QBL
:
16524 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16526 case OPC_MULEU_S_PH_QBR
:
16528 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16530 case OPC_MULQ_RS_PH
:
16532 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16534 case OPC_MULEQ_S_W_PHL
:
16536 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16538 case OPC_MULEQ_S_W_PHR
:
16540 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16542 case OPC_MULQ_S_PH
:
16544 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16548 #ifdef TARGET_MIPS64
16549 case OPC_ADDU_OB_DSP
:
16551 case OPC_MULEQ_S_PW_QHL
:
16553 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16555 case OPC_MULEQ_S_PW_QHR
:
16557 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16559 case OPC_MULEU_S_QH_OBL
:
16561 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16563 case OPC_MULEU_S_QH_OBR
:
16565 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16567 case OPC_MULQ_RS_QH
:
16569 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16576 tcg_temp_free_i32(t0
);
16577 tcg_temp_free(v1_t
);
16578 tcg_temp_free(v2_t
);
16581 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16589 /* Treat as NOP. */
16593 t0
= tcg_temp_new();
16594 val_t
= tcg_temp_new();
16595 gen_load_gpr(val_t
, val
);
16598 case OPC_ABSQ_S_PH_DSP
:
16602 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16607 target_long result
;
16608 imm
= (ctx
->opcode
>> 16) & 0xFF;
16609 result
= (uint32_t)imm
<< 24 |
16610 (uint32_t)imm
<< 16 |
16611 (uint32_t)imm
<< 8 |
16613 result
= (int32_t)result
;
16614 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16619 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16620 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16621 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16622 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16623 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16624 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16629 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16630 imm
= (int16_t)(imm
<< 6) >> 6;
16631 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16632 (target_long
)((int32_t)imm
<< 16 | \
16638 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16639 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16640 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16641 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16645 #ifdef TARGET_MIPS64
16646 case OPC_ABSQ_S_QH_DSP
:
16653 imm
= (ctx
->opcode
>> 16) & 0xFF;
16654 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16655 temp
= (temp
<< 16) | temp
;
16656 temp
= (temp
<< 32) | temp
;
16657 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16665 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16666 imm
= (int16_t)(imm
<< 6) >> 6;
16667 temp
= ((target_long
)imm
<< 32) \
16668 | ((target_long
)imm
& 0xFFFFFFFF);
16669 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16677 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16678 imm
= (int16_t)(imm
<< 6) >> 6;
16680 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16681 ((uint64_t)(uint16_t)imm
<< 32) |
16682 ((uint64_t)(uint16_t)imm
<< 16) |
16683 (uint64_t)(uint16_t)imm
;
16684 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16689 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16690 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16691 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16692 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16693 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16694 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16695 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16699 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16700 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16701 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16705 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16706 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16707 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16708 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16709 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16716 tcg_temp_free(val_t
);
16719 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16720 uint32_t op1
, uint32_t op2
,
16721 int ret
, int v1
, int v2
, int check_ret
)
16727 if ((ret
== 0) && (check_ret
== 1)) {
16728 /* Treat as NOP. */
16732 t1
= tcg_temp_new();
16733 v1_t
= tcg_temp_new();
16734 v2_t
= tcg_temp_new();
16736 gen_load_gpr(v1_t
, v1
);
16737 gen_load_gpr(v2_t
, v2
);
16740 case OPC_CMPU_EQ_QB_DSP
:
16742 case OPC_CMPU_EQ_QB
:
16744 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16746 case OPC_CMPU_LT_QB
:
16748 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16750 case OPC_CMPU_LE_QB
:
16752 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16754 case OPC_CMPGU_EQ_QB
:
16756 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16758 case OPC_CMPGU_LT_QB
:
16760 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16762 case OPC_CMPGU_LE_QB
:
16764 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16766 case OPC_CMPGDU_EQ_QB
:
16768 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16769 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16770 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16771 tcg_gen_shli_tl(t1
, t1
, 24);
16772 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16774 case OPC_CMPGDU_LT_QB
:
16776 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16777 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16778 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16779 tcg_gen_shli_tl(t1
, t1
, 24);
16780 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16782 case OPC_CMPGDU_LE_QB
:
16784 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16785 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16786 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16787 tcg_gen_shli_tl(t1
, t1
, 24);
16788 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16790 case OPC_CMP_EQ_PH
:
16792 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16794 case OPC_CMP_LT_PH
:
16796 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16798 case OPC_CMP_LE_PH
:
16800 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16804 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16808 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16810 case OPC_PACKRL_PH
:
16812 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16816 #ifdef TARGET_MIPS64
16817 case OPC_CMPU_EQ_OB_DSP
:
16819 case OPC_CMP_EQ_PW
:
16821 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16823 case OPC_CMP_LT_PW
:
16825 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16827 case OPC_CMP_LE_PW
:
16829 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16831 case OPC_CMP_EQ_QH
:
16833 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16835 case OPC_CMP_LT_QH
:
16837 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16839 case OPC_CMP_LE_QH
:
16841 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16843 case OPC_CMPGDU_EQ_OB
:
16845 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16847 case OPC_CMPGDU_LT_OB
:
16849 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16851 case OPC_CMPGDU_LE_OB
:
16853 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16855 case OPC_CMPGU_EQ_OB
:
16857 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16859 case OPC_CMPGU_LT_OB
:
16861 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16863 case OPC_CMPGU_LE_OB
:
16865 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16867 case OPC_CMPU_EQ_OB
:
16869 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16871 case OPC_CMPU_LT_OB
:
16873 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16875 case OPC_CMPU_LE_OB
:
16877 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16879 case OPC_PACKRL_PW
:
16881 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16885 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16889 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16893 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16901 tcg_temp_free(v1_t
);
16902 tcg_temp_free(v2_t
);
16905 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16906 uint32_t op1
, int rt
, int rs
, int sa
)
16913 /* Treat as NOP. */
16917 t0
= tcg_temp_new();
16918 gen_load_gpr(t0
, rs
);
16921 case OPC_APPEND_DSP
:
16922 switch (MASK_APPEND(ctx
->opcode
)) {
16925 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16927 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16931 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16932 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16933 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16934 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16936 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16940 if (sa
!= 0 && sa
!= 2) {
16941 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16942 tcg_gen_ext32u_tl(t0
, t0
);
16943 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16944 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16946 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16948 default: /* Invalid */
16949 MIPS_INVAL("MASK APPEND");
16950 generate_exception_end(ctx
, EXCP_RI
);
16954 #ifdef TARGET_MIPS64
16955 case OPC_DAPPEND_DSP
:
16956 switch (MASK_DAPPEND(ctx
->opcode
)) {
16959 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16963 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16964 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16965 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16969 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16970 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16971 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16976 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16977 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16978 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16979 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16982 default: /* Invalid */
16983 MIPS_INVAL("MASK DAPPEND");
16984 generate_exception_end(ctx
, EXCP_RI
);
16993 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16994 int ret
, int v1
, int v2
, int check_ret
)
17003 if ((ret
== 0) && (check_ret
== 1)) {
17004 /* Treat as NOP. */
17008 t0
= tcg_temp_new();
17009 t1
= tcg_temp_new();
17010 v1_t
= tcg_temp_new();
17011 v2_t
= tcg_temp_new();
17013 gen_load_gpr(v1_t
, v1
);
17014 gen_load_gpr(v2_t
, v2
);
17017 case OPC_EXTR_W_DSP
:
17021 tcg_gen_movi_tl(t0
, v2
);
17022 tcg_gen_movi_tl(t1
, v1
);
17023 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17026 tcg_gen_movi_tl(t0
, v2
);
17027 tcg_gen_movi_tl(t1
, v1
);
17028 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17030 case OPC_EXTR_RS_W
:
17031 tcg_gen_movi_tl(t0
, v2
);
17032 tcg_gen_movi_tl(t1
, v1
);
17033 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17036 tcg_gen_movi_tl(t0
, v2
);
17037 tcg_gen_movi_tl(t1
, v1
);
17038 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17040 case OPC_EXTRV_S_H
:
17041 tcg_gen_movi_tl(t0
, v2
);
17042 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17045 tcg_gen_movi_tl(t0
, v2
);
17046 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17048 case OPC_EXTRV_R_W
:
17049 tcg_gen_movi_tl(t0
, v2
);
17050 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17052 case OPC_EXTRV_RS_W
:
17053 tcg_gen_movi_tl(t0
, v2
);
17054 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17057 tcg_gen_movi_tl(t0
, v2
);
17058 tcg_gen_movi_tl(t1
, v1
);
17059 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17062 tcg_gen_movi_tl(t0
, v2
);
17063 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17066 tcg_gen_movi_tl(t0
, v2
);
17067 tcg_gen_movi_tl(t1
, v1
);
17068 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17071 tcg_gen_movi_tl(t0
, v2
);
17072 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17075 imm
= (ctx
->opcode
>> 20) & 0x3F;
17076 tcg_gen_movi_tl(t0
, ret
);
17077 tcg_gen_movi_tl(t1
, imm
);
17078 gen_helper_shilo(t0
, t1
, cpu_env
);
17081 tcg_gen_movi_tl(t0
, ret
);
17082 gen_helper_shilo(t0
, v1_t
, cpu_env
);
17085 tcg_gen_movi_tl(t0
, ret
);
17086 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
17089 imm
= (ctx
->opcode
>> 11) & 0x3FF;
17090 tcg_gen_movi_tl(t0
, imm
);
17091 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
17094 imm
= (ctx
->opcode
>> 16) & 0x03FF;
17095 tcg_gen_movi_tl(t0
, imm
);
17096 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
17100 #ifdef TARGET_MIPS64
17101 case OPC_DEXTR_W_DSP
:
17105 tcg_gen_movi_tl(t0
, ret
);
17106 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
17110 int shift
= (ctx
->opcode
>> 19) & 0x7F;
17111 int ac
= (ctx
->opcode
>> 11) & 0x03;
17112 tcg_gen_movi_tl(t0
, shift
);
17113 tcg_gen_movi_tl(t1
, ac
);
17114 gen_helper_dshilo(t0
, t1
, cpu_env
);
17119 int ac
= (ctx
->opcode
>> 11) & 0x03;
17120 tcg_gen_movi_tl(t0
, ac
);
17121 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
17125 tcg_gen_movi_tl(t0
, v2
);
17126 tcg_gen_movi_tl(t1
, v1
);
17128 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17131 tcg_gen_movi_tl(t0
, v2
);
17132 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17135 tcg_gen_movi_tl(t0
, v2
);
17136 tcg_gen_movi_tl(t1
, v1
);
17137 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17140 tcg_gen_movi_tl(t0
, v2
);
17141 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17144 tcg_gen_movi_tl(t0
, v2
);
17145 tcg_gen_movi_tl(t1
, v1
);
17146 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17148 case OPC_DEXTR_R_L
:
17149 tcg_gen_movi_tl(t0
, v2
);
17150 tcg_gen_movi_tl(t1
, v1
);
17151 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17153 case OPC_DEXTR_RS_L
:
17154 tcg_gen_movi_tl(t0
, v2
);
17155 tcg_gen_movi_tl(t1
, v1
);
17156 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17159 tcg_gen_movi_tl(t0
, v2
);
17160 tcg_gen_movi_tl(t1
, v1
);
17161 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17163 case OPC_DEXTR_R_W
:
17164 tcg_gen_movi_tl(t0
, v2
);
17165 tcg_gen_movi_tl(t1
, v1
);
17166 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17168 case OPC_DEXTR_RS_W
:
17169 tcg_gen_movi_tl(t0
, v2
);
17170 tcg_gen_movi_tl(t1
, v1
);
17171 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17173 case OPC_DEXTR_S_H
:
17174 tcg_gen_movi_tl(t0
, v2
);
17175 tcg_gen_movi_tl(t1
, v1
);
17176 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17178 case OPC_DEXTRV_S_H
:
17179 tcg_gen_movi_tl(t0
, v2
);
17180 tcg_gen_movi_tl(t1
, v1
);
17181 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17184 tcg_gen_movi_tl(t0
, v2
);
17185 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17187 case OPC_DEXTRV_R_L
:
17188 tcg_gen_movi_tl(t0
, v2
);
17189 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17191 case OPC_DEXTRV_RS_L
:
17192 tcg_gen_movi_tl(t0
, v2
);
17193 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17196 tcg_gen_movi_tl(t0
, v2
);
17197 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17199 case OPC_DEXTRV_R_W
:
17200 tcg_gen_movi_tl(t0
, v2
);
17201 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17203 case OPC_DEXTRV_RS_W
:
17204 tcg_gen_movi_tl(t0
, v2
);
17205 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17214 tcg_temp_free(v1_t
);
17215 tcg_temp_free(v2_t
);
17218 /* End MIPSDSP functions. */
17220 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17222 int rs
, rt
, rd
, sa
;
17225 rs
= (ctx
->opcode
>> 21) & 0x1f;
17226 rt
= (ctx
->opcode
>> 16) & 0x1f;
17227 rd
= (ctx
->opcode
>> 11) & 0x1f;
17228 sa
= (ctx
->opcode
>> 6) & 0x1f;
17230 op1
= MASK_SPECIAL(ctx
->opcode
);
17233 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17235 case OPC_MULT
... OPC_DIVU
:
17236 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17246 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17249 MIPS_INVAL("special_r6 muldiv");
17250 generate_exception_end(ctx
, EXCP_RI
);
17256 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17260 if (rt
== 0 && sa
== 1) {
17261 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17262 We need additionally to check other fields */
17263 gen_cl(ctx
, op1
, rd
, rs
);
17265 generate_exception_end(ctx
, EXCP_RI
);
17269 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17270 gen_helper_do_semihosting(cpu_env
);
17272 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17273 generate_exception_end(ctx
, EXCP_RI
);
17275 generate_exception_end(ctx
, EXCP_DBp
);
17279 #if defined(TARGET_MIPS64)
17281 check_mips_64(ctx
);
17282 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17286 if (rt
== 0 && sa
== 1) {
17287 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17288 We need additionally to check other fields */
17289 check_mips_64(ctx
);
17290 gen_cl(ctx
, op1
, rd
, rs
);
17292 generate_exception_end(ctx
, EXCP_RI
);
17295 case OPC_DMULT
... OPC_DDIVU
:
17296 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17306 check_mips_64(ctx
);
17307 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17310 MIPS_INVAL("special_r6 muldiv");
17311 generate_exception_end(ctx
, EXCP_RI
);
17316 default: /* Invalid */
17317 MIPS_INVAL("special_r6");
17318 generate_exception_end(ctx
, EXCP_RI
);
17323 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17325 int rs
, rt
, rd
, sa
;
17328 rs
= (ctx
->opcode
>> 21) & 0x1f;
17329 rt
= (ctx
->opcode
>> 16) & 0x1f;
17330 rd
= (ctx
->opcode
>> 11) & 0x1f;
17331 sa
= (ctx
->opcode
>> 6) & 0x1f;
17333 op1
= MASK_SPECIAL(ctx
->opcode
);
17335 case OPC_MOVN
: /* Conditional move */
17337 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17338 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17339 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17341 case OPC_MFHI
: /* Move from HI/LO */
17343 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17346 case OPC_MTLO
: /* Move to HI/LO */
17347 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17350 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17351 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17352 check_cp1_enabled(ctx
);
17353 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17354 (ctx
->opcode
>> 16) & 1);
17356 generate_exception_err(ctx
, EXCP_CpU
, 1);
17362 check_insn(ctx
, INSN_VR54XX
);
17363 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17364 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17366 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17371 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17373 #if defined(TARGET_MIPS64)
17374 case OPC_DMULT
... OPC_DDIVU
:
17375 check_insn(ctx
, ISA_MIPS3
);
17376 check_mips_64(ctx
);
17377 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17381 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17384 #ifdef MIPS_STRICT_STANDARD
17385 MIPS_INVAL("SPIM");
17386 generate_exception_end(ctx
, EXCP_RI
);
17388 /* Implemented as RI exception for now. */
17389 MIPS_INVAL("spim (unofficial)");
17390 generate_exception_end(ctx
, EXCP_RI
);
17393 default: /* Invalid */
17394 MIPS_INVAL("special_legacy");
17395 generate_exception_end(ctx
, EXCP_RI
);
17400 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17402 int rs
, rt
, rd
, sa
;
17405 rs
= (ctx
->opcode
>> 21) & 0x1f;
17406 rt
= (ctx
->opcode
>> 16) & 0x1f;
17407 rd
= (ctx
->opcode
>> 11) & 0x1f;
17408 sa
= (ctx
->opcode
>> 6) & 0x1f;
17410 op1
= MASK_SPECIAL(ctx
->opcode
);
17412 case OPC_SLL
: /* Shift with immediate */
17413 if (sa
== 5 && rd
== 0 &&
17414 rs
== 0 && rt
== 0) { /* PAUSE */
17415 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17416 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17417 generate_exception_end(ctx
, EXCP_RI
);
17423 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17426 switch ((ctx
->opcode
>> 21) & 0x1f) {
17428 /* rotr is decoded as srl on non-R2 CPUs */
17429 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17434 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17437 generate_exception_end(ctx
, EXCP_RI
);
17441 case OPC_ADD
... OPC_SUBU
:
17442 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17444 case OPC_SLLV
: /* Shifts */
17446 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17449 switch ((ctx
->opcode
>> 6) & 0x1f) {
17451 /* rotrv is decoded as srlv on non-R2 CPUs */
17452 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17457 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17460 generate_exception_end(ctx
, EXCP_RI
);
17464 case OPC_SLT
: /* Set on less than */
17466 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17468 case OPC_AND
: /* Logic*/
17472 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17475 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17477 case OPC_TGE
... OPC_TEQ
: /* Traps */
17479 check_insn(ctx
, ISA_MIPS2
);
17480 gen_trap(ctx
, op1
, rs
, rt
, -1);
17482 case OPC_LSA
: /* OPC_PMON */
17483 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17484 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17485 decode_opc_special_r6(env
, ctx
);
17487 /* Pmon entry point, also R4010 selsl */
17488 #ifdef MIPS_STRICT_STANDARD
17489 MIPS_INVAL("PMON / selsl");
17490 generate_exception_end(ctx
, EXCP_RI
);
17492 gen_helper_0e0i(pmon
, sa
);
17497 generate_exception_end(ctx
, EXCP_SYSCALL
);
17500 generate_exception_end(ctx
, EXCP_BREAK
);
17503 check_insn(ctx
, ISA_MIPS2
);
17504 gen_sync(extract32(ctx
->opcode
, 6, 5));
17507 #if defined(TARGET_MIPS64)
17508 /* MIPS64 specific opcodes */
17513 check_insn(ctx
, ISA_MIPS3
);
17514 check_mips_64(ctx
);
17515 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17518 switch ((ctx
->opcode
>> 21) & 0x1f) {
17520 /* drotr is decoded as dsrl on non-R2 CPUs */
17521 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17526 check_insn(ctx
, ISA_MIPS3
);
17527 check_mips_64(ctx
);
17528 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17531 generate_exception_end(ctx
, EXCP_RI
);
17536 switch ((ctx
->opcode
>> 21) & 0x1f) {
17538 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17539 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17544 check_insn(ctx
, ISA_MIPS3
);
17545 check_mips_64(ctx
);
17546 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17549 generate_exception_end(ctx
, EXCP_RI
);
17553 case OPC_DADD
... OPC_DSUBU
:
17554 check_insn(ctx
, ISA_MIPS3
);
17555 check_mips_64(ctx
);
17556 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17560 check_insn(ctx
, ISA_MIPS3
);
17561 check_mips_64(ctx
);
17562 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17565 switch ((ctx
->opcode
>> 6) & 0x1f) {
17567 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17568 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17573 check_insn(ctx
, ISA_MIPS3
);
17574 check_mips_64(ctx
);
17575 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17578 generate_exception_end(ctx
, EXCP_RI
);
17583 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17584 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17585 decode_opc_special_r6(env
, ctx
);
17590 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17591 decode_opc_special_r6(env
, ctx
);
17593 decode_opc_special_legacy(env
, ctx
);
17598 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17605 rs
= (ctx
->opcode
>> 21) & 0x1f;
17606 rt
= (ctx
->opcode
>> 16) & 0x1f;
17607 rd
= (ctx
->opcode
>> 11) & 0x1f;
17609 op1
= MASK_SPECIAL2(ctx
->opcode
);
17611 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17612 case OPC_MSUB
... OPC_MSUBU
:
17613 check_insn(ctx
, ISA_MIPS32
);
17614 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17617 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17620 case OPC_DIVU_G_2F
:
17621 case OPC_MULT_G_2F
:
17622 case OPC_MULTU_G_2F
:
17624 case OPC_MODU_G_2F
:
17625 check_insn(ctx
, INSN_LOONGSON2F
);
17626 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17630 check_insn(ctx
, ISA_MIPS32
);
17631 gen_cl(ctx
, op1
, rd
, rs
);
17634 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17635 gen_helper_do_semihosting(cpu_env
);
17637 /* XXX: not clear which exception should be raised
17638 * when in debug mode...
17640 check_insn(ctx
, ISA_MIPS32
);
17641 generate_exception_end(ctx
, EXCP_DBp
);
17644 #if defined(TARGET_MIPS64)
17647 check_insn(ctx
, ISA_MIPS64
);
17648 check_mips_64(ctx
);
17649 gen_cl(ctx
, op1
, rd
, rs
);
17651 case OPC_DMULT_G_2F
:
17652 case OPC_DMULTU_G_2F
:
17653 case OPC_DDIV_G_2F
:
17654 case OPC_DDIVU_G_2F
:
17655 case OPC_DMOD_G_2F
:
17656 case OPC_DMODU_G_2F
:
17657 check_insn(ctx
, INSN_LOONGSON2F
);
17658 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17661 default: /* Invalid */
17662 MIPS_INVAL("special2_legacy");
17663 generate_exception_end(ctx
, EXCP_RI
);
17668 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17670 int rs
, rt
, rd
, sa
;
17674 rs
= (ctx
->opcode
>> 21) & 0x1f;
17675 rt
= (ctx
->opcode
>> 16) & 0x1f;
17676 rd
= (ctx
->opcode
>> 11) & 0x1f;
17677 sa
= (ctx
->opcode
>> 6) & 0x1f;
17678 imm
= (int16_t)ctx
->opcode
>> 7;
17680 op1
= MASK_SPECIAL3(ctx
->opcode
);
17684 /* hint codes 24-31 are reserved and signal RI */
17685 generate_exception_end(ctx
, EXCP_RI
);
17687 /* Treat as NOP. */
17690 check_cp0_enabled(ctx
);
17691 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17692 gen_cache_operation(ctx
, rt
, rs
, imm
);
17696 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17699 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17704 /* Treat as NOP. */
17707 op2
= MASK_BSHFL(ctx
->opcode
);
17709 case OPC_ALIGN
... OPC_ALIGN_END
:
17710 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17713 gen_bitswap(ctx
, op2
, rd
, rt
);
17718 #if defined(TARGET_MIPS64)
17720 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17723 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17726 check_mips_64(ctx
);
17729 /* Treat as NOP. */
17732 op2
= MASK_DBSHFL(ctx
->opcode
);
17734 case OPC_DALIGN
... OPC_DALIGN_END
:
17735 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17738 gen_bitswap(ctx
, op2
, rd
, rt
);
17745 default: /* Invalid */
17746 MIPS_INVAL("special3_r6");
17747 generate_exception_end(ctx
, EXCP_RI
);
17752 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17757 rs
= (ctx
->opcode
>> 21) & 0x1f;
17758 rt
= (ctx
->opcode
>> 16) & 0x1f;
17759 rd
= (ctx
->opcode
>> 11) & 0x1f;
17761 op1
= MASK_SPECIAL3(ctx
->opcode
);
17763 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17764 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17765 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17766 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17767 * the same mask and op1. */
17768 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17769 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17772 case OPC_ADDUH_R_QB
:
17774 case OPC_ADDQH_R_PH
:
17776 case OPC_ADDQH_R_W
:
17778 case OPC_SUBUH_R_QB
:
17780 case OPC_SUBQH_R_PH
:
17782 case OPC_SUBQH_R_W
:
17783 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17788 case OPC_MULQ_RS_W
:
17789 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17792 MIPS_INVAL("MASK ADDUH.QB");
17793 generate_exception_end(ctx
, EXCP_RI
);
17796 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17797 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17799 generate_exception_end(ctx
, EXCP_RI
);
17803 op2
= MASK_LX(ctx
->opcode
);
17805 #if defined(TARGET_MIPS64)
17811 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17813 default: /* Invalid */
17814 MIPS_INVAL("MASK LX");
17815 generate_exception_end(ctx
, EXCP_RI
);
17819 case OPC_ABSQ_S_PH_DSP
:
17820 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17822 case OPC_ABSQ_S_QB
:
17823 case OPC_ABSQ_S_PH
:
17825 case OPC_PRECEQ_W_PHL
:
17826 case OPC_PRECEQ_W_PHR
:
17827 case OPC_PRECEQU_PH_QBL
:
17828 case OPC_PRECEQU_PH_QBR
:
17829 case OPC_PRECEQU_PH_QBLA
:
17830 case OPC_PRECEQU_PH_QBRA
:
17831 case OPC_PRECEU_PH_QBL
:
17832 case OPC_PRECEU_PH_QBR
:
17833 case OPC_PRECEU_PH_QBLA
:
17834 case OPC_PRECEU_PH_QBRA
:
17835 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17842 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17845 MIPS_INVAL("MASK ABSQ_S.PH");
17846 generate_exception_end(ctx
, EXCP_RI
);
17850 case OPC_ADDU_QB_DSP
:
17851 op2
= MASK_ADDU_QB(ctx
->opcode
);
17854 case OPC_ADDQ_S_PH
:
17857 case OPC_ADDU_S_QB
:
17859 case OPC_ADDU_S_PH
:
17861 case OPC_SUBQ_S_PH
:
17864 case OPC_SUBU_S_QB
:
17866 case OPC_SUBU_S_PH
:
17870 case OPC_RADDU_W_QB
:
17871 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17873 case OPC_MULEU_S_PH_QBL
:
17874 case OPC_MULEU_S_PH_QBR
:
17875 case OPC_MULQ_RS_PH
:
17876 case OPC_MULEQ_S_W_PHL
:
17877 case OPC_MULEQ_S_W_PHR
:
17878 case OPC_MULQ_S_PH
:
17879 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17881 default: /* Invalid */
17882 MIPS_INVAL("MASK ADDU.QB");
17883 generate_exception_end(ctx
, EXCP_RI
);
17888 case OPC_CMPU_EQ_QB_DSP
:
17889 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17891 case OPC_PRECR_SRA_PH_W
:
17892 case OPC_PRECR_SRA_R_PH_W
:
17893 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17895 case OPC_PRECR_QB_PH
:
17896 case OPC_PRECRQ_QB_PH
:
17897 case OPC_PRECRQ_PH_W
:
17898 case OPC_PRECRQ_RS_PH_W
:
17899 case OPC_PRECRQU_S_QB_PH
:
17900 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17902 case OPC_CMPU_EQ_QB
:
17903 case OPC_CMPU_LT_QB
:
17904 case OPC_CMPU_LE_QB
:
17905 case OPC_CMP_EQ_PH
:
17906 case OPC_CMP_LT_PH
:
17907 case OPC_CMP_LE_PH
:
17908 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17910 case OPC_CMPGU_EQ_QB
:
17911 case OPC_CMPGU_LT_QB
:
17912 case OPC_CMPGU_LE_QB
:
17913 case OPC_CMPGDU_EQ_QB
:
17914 case OPC_CMPGDU_LT_QB
:
17915 case OPC_CMPGDU_LE_QB
:
17918 case OPC_PACKRL_PH
:
17919 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17921 default: /* Invalid */
17922 MIPS_INVAL("MASK CMPU.EQ.QB");
17923 generate_exception_end(ctx
, EXCP_RI
);
17927 case OPC_SHLL_QB_DSP
:
17928 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17930 case OPC_DPA_W_PH_DSP
:
17931 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17933 case OPC_DPAU_H_QBL
:
17934 case OPC_DPAU_H_QBR
:
17935 case OPC_DPSU_H_QBL
:
17936 case OPC_DPSU_H_QBR
:
17938 case OPC_DPAX_W_PH
:
17939 case OPC_DPAQ_S_W_PH
:
17940 case OPC_DPAQX_S_W_PH
:
17941 case OPC_DPAQX_SA_W_PH
:
17943 case OPC_DPSX_W_PH
:
17944 case OPC_DPSQ_S_W_PH
:
17945 case OPC_DPSQX_S_W_PH
:
17946 case OPC_DPSQX_SA_W_PH
:
17947 case OPC_MULSAQ_S_W_PH
:
17948 case OPC_DPAQ_SA_L_W
:
17949 case OPC_DPSQ_SA_L_W
:
17950 case OPC_MAQ_S_W_PHL
:
17951 case OPC_MAQ_S_W_PHR
:
17952 case OPC_MAQ_SA_W_PHL
:
17953 case OPC_MAQ_SA_W_PHR
:
17954 case OPC_MULSA_W_PH
:
17955 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17957 default: /* Invalid */
17958 MIPS_INVAL("MASK DPAW.PH");
17959 generate_exception_end(ctx
, EXCP_RI
);
17964 op2
= MASK_INSV(ctx
->opcode
);
17975 t0
= tcg_temp_new();
17976 t1
= tcg_temp_new();
17978 gen_load_gpr(t0
, rt
);
17979 gen_load_gpr(t1
, rs
);
17981 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17987 default: /* Invalid */
17988 MIPS_INVAL("MASK INSV");
17989 generate_exception_end(ctx
, EXCP_RI
);
17993 case OPC_APPEND_DSP
:
17994 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17996 case OPC_EXTR_W_DSP
:
17997 op2
= MASK_EXTR_W(ctx
->opcode
);
18001 case OPC_EXTR_RS_W
:
18003 case OPC_EXTRV_S_H
:
18005 case OPC_EXTRV_R_W
:
18006 case OPC_EXTRV_RS_W
:
18011 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18014 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18020 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18022 default: /* Invalid */
18023 MIPS_INVAL("MASK EXTR.W");
18024 generate_exception_end(ctx
, EXCP_RI
);
18028 #if defined(TARGET_MIPS64)
18029 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
18030 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
18031 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
18032 check_insn(ctx
, INSN_LOONGSON2E
);
18033 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
18035 case OPC_ABSQ_S_QH_DSP
:
18036 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
18038 case OPC_PRECEQ_L_PWL
:
18039 case OPC_PRECEQ_L_PWR
:
18040 case OPC_PRECEQ_PW_QHL
:
18041 case OPC_PRECEQ_PW_QHR
:
18042 case OPC_PRECEQ_PW_QHLA
:
18043 case OPC_PRECEQ_PW_QHRA
:
18044 case OPC_PRECEQU_QH_OBL
:
18045 case OPC_PRECEQU_QH_OBR
:
18046 case OPC_PRECEQU_QH_OBLA
:
18047 case OPC_PRECEQU_QH_OBRA
:
18048 case OPC_PRECEU_QH_OBL
:
18049 case OPC_PRECEU_QH_OBR
:
18050 case OPC_PRECEU_QH_OBLA
:
18051 case OPC_PRECEU_QH_OBRA
:
18052 case OPC_ABSQ_S_OB
:
18053 case OPC_ABSQ_S_PW
:
18054 case OPC_ABSQ_S_QH
:
18055 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18063 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18065 default: /* Invalid */
18066 MIPS_INVAL("MASK ABSQ_S.QH");
18067 generate_exception_end(ctx
, EXCP_RI
);
18071 case OPC_ADDU_OB_DSP
:
18072 op2
= MASK_ADDU_OB(ctx
->opcode
);
18074 case OPC_RADDU_L_OB
:
18076 case OPC_SUBQ_S_PW
:
18078 case OPC_SUBQ_S_QH
:
18080 case OPC_SUBU_S_OB
:
18082 case OPC_SUBU_S_QH
:
18084 case OPC_SUBUH_R_OB
:
18086 case OPC_ADDQ_S_PW
:
18088 case OPC_ADDQ_S_QH
:
18090 case OPC_ADDU_S_OB
:
18092 case OPC_ADDU_S_QH
:
18094 case OPC_ADDUH_R_OB
:
18095 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18097 case OPC_MULEQ_S_PW_QHL
:
18098 case OPC_MULEQ_S_PW_QHR
:
18099 case OPC_MULEU_S_QH_OBL
:
18100 case OPC_MULEU_S_QH_OBR
:
18101 case OPC_MULQ_RS_QH
:
18102 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18104 default: /* Invalid */
18105 MIPS_INVAL("MASK ADDU.OB");
18106 generate_exception_end(ctx
, EXCP_RI
);
18110 case OPC_CMPU_EQ_OB_DSP
:
18111 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
18113 case OPC_PRECR_SRA_QH_PW
:
18114 case OPC_PRECR_SRA_R_QH_PW
:
18115 /* Return value is rt. */
18116 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18118 case OPC_PRECR_OB_QH
:
18119 case OPC_PRECRQ_OB_QH
:
18120 case OPC_PRECRQ_PW_L
:
18121 case OPC_PRECRQ_QH_PW
:
18122 case OPC_PRECRQ_RS_QH_PW
:
18123 case OPC_PRECRQU_S_OB_QH
:
18124 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18126 case OPC_CMPU_EQ_OB
:
18127 case OPC_CMPU_LT_OB
:
18128 case OPC_CMPU_LE_OB
:
18129 case OPC_CMP_EQ_QH
:
18130 case OPC_CMP_LT_QH
:
18131 case OPC_CMP_LE_QH
:
18132 case OPC_CMP_EQ_PW
:
18133 case OPC_CMP_LT_PW
:
18134 case OPC_CMP_LE_PW
:
18135 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18137 case OPC_CMPGDU_EQ_OB
:
18138 case OPC_CMPGDU_LT_OB
:
18139 case OPC_CMPGDU_LE_OB
:
18140 case OPC_CMPGU_EQ_OB
:
18141 case OPC_CMPGU_LT_OB
:
18142 case OPC_CMPGU_LE_OB
:
18143 case OPC_PACKRL_PW
:
18147 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18149 default: /* Invalid */
18150 MIPS_INVAL("MASK CMPU_EQ.OB");
18151 generate_exception_end(ctx
, EXCP_RI
);
18155 case OPC_DAPPEND_DSP
:
18156 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18158 case OPC_DEXTR_W_DSP
:
18159 op2
= MASK_DEXTR_W(ctx
->opcode
);
18166 case OPC_DEXTR_R_L
:
18167 case OPC_DEXTR_RS_L
:
18169 case OPC_DEXTR_R_W
:
18170 case OPC_DEXTR_RS_W
:
18171 case OPC_DEXTR_S_H
:
18173 case OPC_DEXTRV_R_L
:
18174 case OPC_DEXTRV_RS_L
:
18175 case OPC_DEXTRV_S_H
:
18177 case OPC_DEXTRV_R_W
:
18178 case OPC_DEXTRV_RS_W
:
18179 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18184 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18186 default: /* Invalid */
18187 MIPS_INVAL("MASK EXTR.W");
18188 generate_exception_end(ctx
, EXCP_RI
);
18192 case OPC_DPAQ_W_QH_DSP
:
18193 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
18195 case OPC_DPAU_H_OBL
:
18196 case OPC_DPAU_H_OBR
:
18197 case OPC_DPSU_H_OBL
:
18198 case OPC_DPSU_H_OBR
:
18200 case OPC_DPAQ_S_W_QH
:
18202 case OPC_DPSQ_S_W_QH
:
18203 case OPC_MULSAQ_S_W_QH
:
18204 case OPC_DPAQ_SA_L_PW
:
18205 case OPC_DPSQ_SA_L_PW
:
18206 case OPC_MULSAQ_S_L_PW
:
18207 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18209 case OPC_MAQ_S_W_QHLL
:
18210 case OPC_MAQ_S_W_QHLR
:
18211 case OPC_MAQ_S_W_QHRL
:
18212 case OPC_MAQ_S_W_QHRR
:
18213 case OPC_MAQ_SA_W_QHLL
:
18214 case OPC_MAQ_SA_W_QHLR
:
18215 case OPC_MAQ_SA_W_QHRL
:
18216 case OPC_MAQ_SA_W_QHRR
:
18217 case OPC_MAQ_S_L_PWL
:
18218 case OPC_MAQ_S_L_PWR
:
18223 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18225 default: /* Invalid */
18226 MIPS_INVAL("MASK DPAQ.W.QH");
18227 generate_exception_end(ctx
, EXCP_RI
);
18231 case OPC_DINSV_DSP
:
18232 op2
= MASK_INSV(ctx
->opcode
);
18243 t0
= tcg_temp_new();
18244 t1
= tcg_temp_new();
18246 gen_load_gpr(t0
, rt
);
18247 gen_load_gpr(t1
, rs
);
18249 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18255 default: /* Invalid */
18256 MIPS_INVAL("MASK DINSV");
18257 generate_exception_end(ctx
, EXCP_RI
);
18261 case OPC_SHLL_OB_DSP
:
18262 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18265 default: /* Invalid */
18266 MIPS_INVAL("special3_legacy");
18267 generate_exception_end(ctx
, EXCP_RI
);
18272 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18274 int rs
, rt
, rd
, sa
;
18278 rs
= (ctx
->opcode
>> 21) & 0x1f;
18279 rt
= (ctx
->opcode
>> 16) & 0x1f;
18280 rd
= (ctx
->opcode
>> 11) & 0x1f;
18281 sa
= (ctx
->opcode
>> 6) & 0x1f;
18282 imm
= sextract32(ctx
->opcode
, 7, 9);
18284 op1
= MASK_SPECIAL3(ctx
->opcode
);
18287 * EVA loads and stores overlap Loongson 2E instructions decoded by
18288 * decode_opc_special3_legacy(), so be careful to allow their decoding when
18293 case OPC_LWLE
... OPC_LWRE
:
18294 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18296 case OPC_LBUE
... OPC_LHUE
:
18297 case OPC_LBE
... OPC_LWE
:
18298 check_cp0_enabled(ctx
);
18299 gen_ld(ctx
, op1
, rt
, rs
, imm
);
18301 case OPC_SWLE
... OPC_SWRE
:
18302 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18304 case OPC_SBE
... OPC_SHE
:
18306 check_cp0_enabled(ctx
);
18307 gen_st(ctx
, op1
, rt
, rs
, imm
);
18310 check_cp0_enabled(ctx
);
18311 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
18314 check_cp0_enabled(ctx
);
18315 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
18316 gen_cache_operation(ctx
, rt
, rs
, imm
);
18318 /* Treat as NOP. */
18321 check_cp0_enabled(ctx
);
18322 /* Treat as NOP. */
18330 check_insn(ctx
, ISA_MIPS32R2
);
18331 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18334 op2
= MASK_BSHFL(ctx
->opcode
);
18336 case OPC_ALIGN
... OPC_ALIGN_END
:
18338 check_insn(ctx
, ISA_MIPS32R6
);
18339 decode_opc_special3_r6(env
, ctx
);
18342 check_insn(ctx
, ISA_MIPS32R2
);
18343 gen_bshfl(ctx
, op2
, rt
, rd
);
18347 #if defined(TARGET_MIPS64)
18348 case OPC_DEXTM
... OPC_DEXT
:
18349 case OPC_DINSM
... OPC_DINS
:
18350 check_insn(ctx
, ISA_MIPS64R2
);
18351 check_mips_64(ctx
);
18352 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18355 op2
= MASK_DBSHFL(ctx
->opcode
);
18357 case OPC_DALIGN
... OPC_DALIGN_END
:
18359 check_insn(ctx
, ISA_MIPS32R6
);
18360 decode_opc_special3_r6(env
, ctx
);
18363 check_insn(ctx
, ISA_MIPS64R2
);
18364 check_mips_64(ctx
);
18365 op2
= MASK_DBSHFL(ctx
->opcode
);
18366 gen_bshfl(ctx
, op2
, rt
, rd
);
18372 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18375 check_insn(ctx
, ASE_MT
);
18377 TCGv t0
= tcg_temp_new();
18378 TCGv t1
= tcg_temp_new();
18380 gen_load_gpr(t0
, rt
);
18381 gen_load_gpr(t1
, rs
);
18382 gen_helper_fork(t0
, t1
);
18388 check_insn(ctx
, ASE_MT
);
18390 TCGv t0
= tcg_temp_new();
18392 gen_load_gpr(t0
, rs
);
18393 gen_helper_yield(t0
, cpu_env
, t0
);
18394 gen_store_gpr(t0
, rd
);
18399 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18400 decode_opc_special3_r6(env
, ctx
);
18402 decode_opc_special3_legacy(env
, ctx
);
18407 /* MIPS SIMD Architecture (MSA) */
18408 static inline int check_msa_access(DisasContext
*ctx
)
18410 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18411 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18412 generate_exception_end(ctx
, EXCP_RI
);
18416 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18417 if (ctx
->insn_flags
& ASE_MSA
) {
18418 generate_exception_end(ctx
, EXCP_MSADIS
);
18421 generate_exception_end(ctx
, EXCP_RI
);
18428 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18430 /* generates tcg ops to check if any element is 0 */
18431 /* Note this function only works with MSA_WRLEN = 128 */
18432 uint64_t eval_zero_or_big
= 0;
18433 uint64_t eval_big
= 0;
18434 TCGv_i64 t0
= tcg_temp_new_i64();
18435 TCGv_i64 t1
= tcg_temp_new_i64();
18438 eval_zero_or_big
= 0x0101010101010101ULL
;
18439 eval_big
= 0x8080808080808080ULL
;
18442 eval_zero_or_big
= 0x0001000100010001ULL
;
18443 eval_big
= 0x8000800080008000ULL
;
18446 eval_zero_or_big
= 0x0000000100000001ULL
;
18447 eval_big
= 0x8000000080000000ULL
;
18450 eval_zero_or_big
= 0x0000000000000001ULL
;
18451 eval_big
= 0x8000000000000000ULL
;
18454 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18455 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18456 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18457 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18458 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18459 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18460 tcg_gen_or_i64(t0
, t0
, t1
);
18461 /* if all bits are zero then all elements are not zero */
18462 /* if some bit is non-zero then some element is zero */
18463 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18464 tcg_gen_trunc_i64_tl(tresult
, t0
);
18465 tcg_temp_free_i64(t0
);
18466 tcg_temp_free_i64(t1
);
18469 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18471 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18472 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18473 int64_t s16
= (int16_t)ctx
->opcode
;
18475 check_msa_access(ctx
);
18477 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18478 generate_exception_end(ctx
, EXCP_RI
);
18485 TCGv_i64 t0
= tcg_temp_new_i64();
18486 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18487 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18488 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18489 tcg_gen_trunc_i64_tl(bcond
, t0
);
18490 tcg_temp_free_i64(t0
);
18497 gen_check_zero_element(bcond
, df
, wt
);
18503 gen_check_zero_element(bcond
, df
, wt
);
18504 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18508 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
18510 ctx
->hflags
|= MIPS_HFLAG_BC
;
18511 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18514 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18516 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18517 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18518 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18519 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18521 TCGv_i32 twd
= tcg_const_i32(wd
);
18522 TCGv_i32 tws
= tcg_const_i32(ws
);
18523 TCGv_i32 ti8
= tcg_const_i32(i8
);
18525 switch (MASK_MSA_I8(ctx
->opcode
)) {
18527 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18530 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18533 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18536 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18539 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18542 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18545 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18551 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18552 if (df
== DF_DOUBLE
) {
18553 generate_exception_end(ctx
, EXCP_RI
);
18555 TCGv_i32 tdf
= tcg_const_i32(df
);
18556 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18557 tcg_temp_free_i32(tdf
);
18562 MIPS_INVAL("MSA instruction");
18563 generate_exception_end(ctx
, EXCP_RI
);
18567 tcg_temp_free_i32(twd
);
18568 tcg_temp_free_i32(tws
);
18569 tcg_temp_free_i32(ti8
);
18572 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18574 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18575 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18576 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18577 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18578 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18579 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18581 TCGv_i32 tdf
= tcg_const_i32(df
);
18582 TCGv_i32 twd
= tcg_const_i32(wd
);
18583 TCGv_i32 tws
= tcg_const_i32(ws
);
18584 TCGv_i32 timm
= tcg_temp_new_i32();
18585 tcg_gen_movi_i32(timm
, u5
);
18587 switch (MASK_MSA_I5(ctx
->opcode
)) {
18589 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18592 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18594 case OPC_MAXI_S_df
:
18595 tcg_gen_movi_i32(timm
, s5
);
18596 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18598 case OPC_MAXI_U_df
:
18599 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18601 case OPC_MINI_S_df
:
18602 tcg_gen_movi_i32(timm
, s5
);
18603 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18605 case OPC_MINI_U_df
:
18606 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18609 tcg_gen_movi_i32(timm
, s5
);
18610 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18612 case OPC_CLTI_S_df
:
18613 tcg_gen_movi_i32(timm
, s5
);
18614 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18616 case OPC_CLTI_U_df
:
18617 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18619 case OPC_CLEI_S_df
:
18620 tcg_gen_movi_i32(timm
, s5
);
18621 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18623 case OPC_CLEI_U_df
:
18624 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18628 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18629 tcg_gen_movi_i32(timm
, s10
);
18630 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18634 MIPS_INVAL("MSA instruction");
18635 generate_exception_end(ctx
, EXCP_RI
);
18639 tcg_temp_free_i32(tdf
);
18640 tcg_temp_free_i32(twd
);
18641 tcg_temp_free_i32(tws
);
18642 tcg_temp_free_i32(timm
);
18645 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18647 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18648 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18649 uint32_t df
= 0, m
= 0;
18650 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18651 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18658 if ((dfm
& 0x40) == 0x00) {
18661 } else if ((dfm
& 0x60) == 0x40) {
18664 } else if ((dfm
& 0x70) == 0x60) {
18667 } else if ((dfm
& 0x78) == 0x70) {
18671 generate_exception_end(ctx
, EXCP_RI
);
18675 tdf
= tcg_const_i32(df
);
18676 tm
= tcg_const_i32(m
);
18677 twd
= tcg_const_i32(wd
);
18678 tws
= tcg_const_i32(ws
);
18680 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18682 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18685 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18688 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18691 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18694 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18697 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18699 case OPC_BINSLI_df
:
18700 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18702 case OPC_BINSRI_df
:
18703 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18706 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18709 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18712 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18715 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18718 MIPS_INVAL("MSA instruction");
18719 generate_exception_end(ctx
, EXCP_RI
);
18723 tcg_temp_free_i32(tdf
);
18724 tcg_temp_free_i32(tm
);
18725 tcg_temp_free_i32(twd
);
18726 tcg_temp_free_i32(tws
);
18729 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18731 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18732 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18733 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18734 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18735 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18737 TCGv_i32 tdf
= tcg_const_i32(df
);
18738 TCGv_i32 twd
= tcg_const_i32(wd
);
18739 TCGv_i32 tws
= tcg_const_i32(ws
);
18740 TCGv_i32 twt
= tcg_const_i32(wt
);
18742 switch (MASK_MSA_3R(ctx
->opcode
)) {
18744 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18747 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18750 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18753 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18755 case OPC_SUBS_S_df
:
18756 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18759 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18762 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18765 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18768 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18771 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18773 case OPC_ADDS_A_df
:
18774 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18776 case OPC_SUBS_U_df
:
18777 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18780 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18783 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18786 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18789 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18792 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18795 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18797 case OPC_ADDS_S_df
:
18798 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18800 case OPC_SUBSUS_U_df
:
18801 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18804 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18807 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18821 case OPC_ADDS_U_df
:
18822 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18824 case OPC_SUBSUU_S_df
:
18825 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18834 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18840 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18842 case OPC_ASUB_S_df
:
18843 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18846 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18849 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18852 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18858 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18861 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18863 case OPC_ASUB_U_df
:
18864 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18867 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18870 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18873 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18878 case OPC_AVER_S_df
:
18879 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18882 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18885 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18888 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18891 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18893 case OPC_AVER_U_df
:
18894 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18897 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18900 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18903 case OPC_DOTP_S_df
:
18904 case OPC_DOTP_U_df
:
18905 case OPC_DPADD_S_df
:
18906 case OPC_DPADD_U_df
:
18907 case OPC_DPSUB_S_df
:
18908 case OPC_HADD_S_df
:
18909 case OPC_DPSUB_U_df
:
18910 case OPC_HADD_U_df
:
18911 case OPC_HSUB_S_df
:
18912 case OPC_HSUB_U_df
:
18913 if (df
== DF_BYTE
) {
18914 generate_exception_end(ctx
, EXCP_RI
);
18917 switch (MASK_MSA_3R(ctx
->opcode
)) {
18918 case OPC_DOTP_S_df
:
18919 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18921 case OPC_DOTP_U_df
:
18922 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18924 case OPC_DPADD_S_df
:
18925 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18927 case OPC_DPADD_U_df
:
18928 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18930 case OPC_DPSUB_S_df
:
18931 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18933 case OPC_HADD_S_df
:
18934 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18936 case OPC_DPSUB_U_df
:
18937 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18939 case OPC_HADD_U_df
:
18940 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18942 case OPC_HSUB_S_df
:
18943 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18945 case OPC_HSUB_U_df
:
18946 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18951 MIPS_INVAL("MSA instruction");
18952 generate_exception_end(ctx
, EXCP_RI
);
18955 tcg_temp_free_i32(twd
);
18956 tcg_temp_free_i32(tws
);
18957 tcg_temp_free_i32(twt
);
18958 tcg_temp_free_i32(tdf
);
18961 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18963 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18964 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18965 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18966 TCGv telm
= tcg_temp_new();
18967 TCGv_i32 tsr
= tcg_const_i32(source
);
18968 TCGv_i32 tdt
= tcg_const_i32(dest
);
18970 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18972 gen_load_gpr(telm
, source
);
18973 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18976 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18977 gen_store_gpr(telm
, dest
);
18980 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18983 MIPS_INVAL("MSA instruction");
18984 generate_exception_end(ctx
, EXCP_RI
);
18988 tcg_temp_free(telm
);
18989 tcg_temp_free_i32(tdt
);
18990 tcg_temp_free_i32(tsr
);
18993 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18996 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18997 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18998 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19000 TCGv_i32 tws
= tcg_const_i32(ws
);
19001 TCGv_i32 twd
= tcg_const_i32(wd
);
19002 TCGv_i32 tn
= tcg_const_i32(n
);
19003 TCGv_i32 tdf
= tcg_const_i32(df
);
19005 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19007 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
19009 case OPC_SPLATI_df
:
19010 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
19013 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
19015 case OPC_COPY_S_df
:
19016 case OPC_COPY_U_df
:
19017 case OPC_INSERT_df
:
19018 #if !defined(TARGET_MIPS64)
19019 /* Double format valid only for MIPS64 */
19020 if (df
== DF_DOUBLE
) {
19021 generate_exception_end(ctx
, EXCP_RI
);
19025 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19026 case OPC_COPY_S_df
:
19027 if (likely(wd
!= 0)) {
19028 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
19031 case OPC_COPY_U_df
:
19032 if (likely(wd
!= 0)) {
19033 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
19036 case OPC_INSERT_df
:
19037 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
19042 MIPS_INVAL("MSA instruction");
19043 generate_exception_end(ctx
, EXCP_RI
);
19045 tcg_temp_free_i32(twd
);
19046 tcg_temp_free_i32(tws
);
19047 tcg_temp_free_i32(tn
);
19048 tcg_temp_free_i32(tdf
);
19051 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
19053 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
19054 uint32_t df
= 0, n
= 0;
19056 if ((dfn
& 0x30) == 0x00) {
19059 } else if ((dfn
& 0x38) == 0x20) {
19062 } else if ((dfn
& 0x3c) == 0x30) {
19065 } else if ((dfn
& 0x3e) == 0x38) {
19068 } else if (dfn
== 0x3E) {
19069 /* CTCMSA, CFCMSA, MOVE.V */
19070 gen_msa_elm_3e(env
, ctx
);
19073 generate_exception_end(ctx
, EXCP_RI
);
19077 gen_msa_elm_df(env
, ctx
, df
, n
);
19080 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19082 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19083 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
19084 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19085 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19086 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19088 TCGv_i32 twd
= tcg_const_i32(wd
);
19089 TCGv_i32 tws
= tcg_const_i32(ws
);
19090 TCGv_i32 twt
= tcg_const_i32(wt
);
19091 TCGv_i32 tdf
= tcg_temp_new_i32();
19093 /* adjust df value for floating-point instruction */
19094 tcg_gen_movi_i32(tdf
, df
+ 2);
19096 switch (MASK_MSA_3RF(ctx
->opcode
)) {
19098 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19101 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19104 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19107 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19110 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19113 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19116 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
19119 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19122 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19125 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
19128 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19131 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19134 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19137 tcg_gen_movi_i32(tdf
, df
+ 1);
19138 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19141 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19144 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19146 case OPC_MADD_Q_df
:
19147 tcg_gen_movi_i32(tdf
, df
+ 1);
19148 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19151 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19153 case OPC_MSUB_Q_df
:
19154 tcg_gen_movi_i32(tdf
, df
+ 1);
19155 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19158 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19161 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
19164 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19167 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
19170 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19173 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19176 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19179 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19182 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19185 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19188 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19191 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19194 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
19196 case OPC_MULR_Q_df
:
19197 tcg_gen_movi_i32(tdf
, df
+ 1);
19198 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19201 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19203 case OPC_FMIN_A_df
:
19204 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19206 case OPC_MADDR_Q_df
:
19207 tcg_gen_movi_i32(tdf
, df
+ 1);
19208 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19211 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19214 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
19216 case OPC_MSUBR_Q_df
:
19217 tcg_gen_movi_i32(tdf
, df
+ 1);
19218 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19221 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19223 case OPC_FMAX_A_df
:
19224 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19227 MIPS_INVAL("MSA instruction");
19228 generate_exception_end(ctx
, EXCP_RI
);
19232 tcg_temp_free_i32(twd
);
19233 tcg_temp_free_i32(tws
);
19234 tcg_temp_free_i32(twt
);
19235 tcg_temp_free_i32(tdf
);
19238 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
19240 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19241 (op & (0x7 << 18)))
19242 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19243 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19244 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19245 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
19246 TCGv_i32 twd
= tcg_const_i32(wd
);
19247 TCGv_i32 tws
= tcg_const_i32(ws
);
19248 TCGv_i32 twt
= tcg_const_i32(wt
);
19249 TCGv_i32 tdf
= tcg_const_i32(df
);
19251 switch (MASK_MSA_2R(ctx
->opcode
)) {
19253 #if !defined(TARGET_MIPS64)
19254 /* Double format valid only for MIPS64 */
19255 if (df
== DF_DOUBLE
) {
19256 generate_exception_end(ctx
, EXCP_RI
);
19260 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
19263 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
19266 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
19269 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
19272 MIPS_INVAL("MSA instruction");
19273 generate_exception_end(ctx
, EXCP_RI
);
19277 tcg_temp_free_i32(twd
);
19278 tcg_temp_free_i32(tws
);
19279 tcg_temp_free_i32(twt
);
19280 tcg_temp_free_i32(tdf
);
19283 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19285 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19286 (op & (0xf << 17)))
19287 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19288 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19289 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19290 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
19291 TCGv_i32 twd
= tcg_const_i32(wd
);
19292 TCGv_i32 tws
= tcg_const_i32(ws
);
19293 TCGv_i32 twt
= tcg_const_i32(wt
);
19294 /* adjust df value for floating-point instruction */
19295 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
19297 switch (MASK_MSA_2RF(ctx
->opcode
)) {
19298 case OPC_FCLASS_df
:
19299 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
19301 case OPC_FTRUNC_S_df
:
19302 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
19304 case OPC_FTRUNC_U_df
:
19305 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
19308 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19310 case OPC_FRSQRT_df
:
19311 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19314 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19317 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19320 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19322 case OPC_FEXUPL_df
:
19323 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19325 case OPC_FEXUPR_df
:
19326 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19329 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19332 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19334 case OPC_FTINT_S_df
:
19335 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19337 case OPC_FTINT_U_df
:
19338 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19340 case OPC_FFINT_S_df
:
19341 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19343 case OPC_FFINT_U_df
:
19344 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19348 tcg_temp_free_i32(twd
);
19349 tcg_temp_free_i32(tws
);
19350 tcg_temp_free_i32(twt
);
19351 tcg_temp_free_i32(tdf
);
19354 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19356 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19357 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19358 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19359 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19360 TCGv_i32 twd
= tcg_const_i32(wd
);
19361 TCGv_i32 tws
= tcg_const_i32(ws
);
19362 TCGv_i32 twt
= tcg_const_i32(wt
);
19364 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19366 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19369 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19372 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19375 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19378 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19381 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19384 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19387 MIPS_INVAL("MSA instruction");
19388 generate_exception_end(ctx
, EXCP_RI
);
19392 tcg_temp_free_i32(twd
);
19393 tcg_temp_free_i32(tws
);
19394 tcg_temp_free_i32(twt
);
19397 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19399 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19407 gen_msa_vec_v(env
, ctx
);
19410 gen_msa_2r(env
, ctx
);
19413 gen_msa_2rf(env
, ctx
);
19416 MIPS_INVAL("MSA instruction");
19417 generate_exception_end(ctx
, EXCP_RI
);
19422 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19424 uint32_t opcode
= ctx
->opcode
;
19425 check_insn(ctx
, ASE_MSA
);
19426 check_msa_access(ctx
);
19428 switch (MASK_MSA_MINOR(opcode
)) {
19429 case OPC_MSA_I8_00
:
19430 case OPC_MSA_I8_01
:
19431 case OPC_MSA_I8_02
:
19432 gen_msa_i8(env
, ctx
);
19434 case OPC_MSA_I5_06
:
19435 case OPC_MSA_I5_07
:
19436 gen_msa_i5(env
, ctx
);
19438 case OPC_MSA_BIT_09
:
19439 case OPC_MSA_BIT_0A
:
19440 gen_msa_bit(env
, ctx
);
19442 case OPC_MSA_3R_0D
:
19443 case OPC_MSA_3R_0E
:
19444 case OPC_MSA_3R_0F
:
19445 case OPC_MSA_3R_10
:
19446 case OPC_MSA_3R_11
:
19447 case OPC_MSA_3R_12
:
19448 case OPC_MSA_3R_13
:
19449 case OPC_MSA_3R_14
:
19450 case OPC_MSA_3R_15
:
19451 gen_msa_3r(env
, ctx
);
19454 gen_msa_elm(env
, ctx
);
19456 case OPC_MSA_3RF_1A
:
19457 case OPC_MSA_3RF_1B
:
19458 case OPC_MSA_3RF_1C
:
19459 gen_msa_3rf(env
, ctx
);
19462 gen_msa_vec(env
, ctx
);
19473 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19474 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19475 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19476 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19478 TCGv_i32 twd
= tcg_const_i32(wd
);
19479 TCGv taddr
= tcg_temp_new();
19480 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19482 switch (MASK_MSA_MINOR(opcode
)) {
19484 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19487 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19490 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19493 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19496 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19499 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19502 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19505 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19509 tcg_temp_free_i32(twd
);
19510 tcg_temp_free(taddr
);
19514 MIPS_INVAL("MSA instruction");
19515 generate_exception_end(ctx
, EXCP_RI
);
19521 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19524 int rs
, rt
, rd
, sa
;
19528 /* make sure instructions are on a word boundary */
19529 if (ctx
->base
.pc_next
& 0x3) {
19530 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
19531 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19535 /* Handle blikely not taken case */
19536 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19537 TCGLabel
*l1
= gen_new_label();
19539 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19540 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19541 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
19545 op
= MASK_OP_MAJOR(ctx
->opcode
);
19546 rs
= (ctx
->opcode
>> 21) & 0x1f;
19547 rt
= (ctx
->opcode
>> 16) & 0x1f;
19548 rd
= (ctx
->opcode
>> 11) & 0x1f;
19549 sa
= (ctx
->opcode
>> 6) & 0x1f;
19550 imm
= (int16_t)ctx
->opcode
;
19553 decode_opc_special(env
, ctx
);
19556 decode_opc_special2_legacy(env
, ctx
);
19559 decode_opc_special3(env
, ctx
);
19562 op1
= MASK_REGIMM(ctx
->opcode
);
19564 case OPC_BLTZL
: /* REGIMM branches */
19568 check_insn(ctx
, ISA_MIPS2
);
19569 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19573 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19577 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19579 /* OPC_NAL, OPC_BAL */
19580 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19582 generate_exception_end(ctx
, EXCP_RI
);
19585 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19588 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19590 check_insn(ctx
, ISA_MIPS2
);
19591 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19592 gen_trap(ctx
, op1
, rs
, -1, imm
);
19595 check_insn(ctx
, ISA_MIPS32R6
);
19596 generate_exception_end(ctx
, EXCP_RI
);
19599 check_insn(ctx
, ISA_MIPS32R2
);
19600 /* Break the TB to be able to sync copied instructions
19602 ctx
->base
.is_jmp
= DISAS_STOP
;
19604 case OPC_BPOSGE32
: /* MIPS DSP branch */
19605 #if defined(TARGET_MIPS64)
19609 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19611 #if defined(TARGET_MIPS64)
19613 check_insn(ctx
, ISA_MIPS32R6
);
19614 check_mips_64(ctx
);
19616 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19620 check_insn(ctx
, ISA_MIPS32R6
);
19621 check_mips_64(ctx
);
19623 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19627 default: /* Invalid */
19628 MIPS_INVAL("regimm");
19629 generate_exception_end(ctx
, EXCP_RI
);
19634 check_cp0_enabled(ctx
);
19635 op1
= MASK_CP0(ctx
->opcode
);
19643 #if defined(TARGET_MIPS64)
19647 #ifndef CONFIG_USER_ONLY
19648 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19649 #endif /* !CONFIG_USER_ONLY */
19651 case OPC_C0_FIRST
... OPC_C0_LAST
:
19652 #ifndef CONFIG_USER_ONLY
19653 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19654 #endif /* !CONFIG_USER_ONLY */
19657 #ifndef CONFIG_USER_ONLY
19660 TCGv t0
= tcg_temp_new();
19662 op2
= MASK_MFMC0(ctx
->opcode
);
19665 check_insn(ctx
, ASE_MT
);
19666 gen_helper_dmt(t0
);
19667 gen_store_gpr(t0
, rt
);
19670 check_insn(ctx
, ASE_MT
);
19671 gen_helper_emt(t0
);
19672 gen_store_gpr(t0
, rt
);
19675 check_insn(ctx
, ASE_MT
);
19676 gen_helper_dvpe(t0
, cpu_env
);
19677 gen_store_gpr(t0
, rt
);
19680 check_insn(ctx
, ASE_MT
);
19681 gen_helper_evpe(t0
, cpu_env
);
19682 gen_store_gpr(t0
, rt
);
19685 check_insn(ctx
, ISA_MIPS32R6
);
19687 gen_helper_dvp(t0
, cpu_env
);
19688 gen_store_gpr(t0
, rt
);
19692 check_insn(ctx
, ISA_MIPS32R6
);
19694 gen_helper_evp(t0
, cpu_env
);
19695 gen_store_gpr(t0
, rt
);
19699 check_insn(ctx
, ISA_MIPS32R2
);
19700 save_cpu_state(ctx
, 1);
19701 gen_helper_di(t0
, cpu_env
);
19702 gen_store_gpr(t0
, rt
);
19703 /* Stop translation as we may have switched
19704 the execution mode. */
19705 ctx
->base
.is_jmp
= DISAS_STOP
;
19708 check_insn(ctx
, ISA_MIPS32R2
);
19709 save_cpu_state(ctx
, 1);
19710 gen_helper_ei(t0
, cpu_env
);
19711 gen_store_gpr(t0
, rt
);
19712 /* DISAS_STOP isn't sufficient, we need to ensure we break
19713 out of translated code to check for pending interrupts */
19714 gen_save_pc(ctx
->base
.pc_next
+ 4);
19715 ctx
->base
.is_jmp
= DISAS_EXIT
;
19717 default: /* Invalid */
19718 MIPS_INVAL("mfmc0");
19719 generate_exception_end(ctx
, EXCP_RI
);
19724 #endif /* !CONFIG_USER_ONLY */
19727 check_insn(ctx
, ISA_MIPS32R2
);
19728 gen_load_srsgpr(rt
, rd
);
19731 check_insn(ctx
, ISA_MIPS32R2
);
19732 gen_store_srsgpr(rt
, rd
);
19736 generate_exception_end(ctx
, EXCP_RI
);
19740 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19741 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19742 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19743 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19746 /* Arithmetic with immediate opcode */
19747 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19751 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19753 case OPC_SLTI
: /* Set on less than with immediate opcode */
19755 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19757 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19758 case OPC_LUI
: /* OPC_AUI */
19761 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19763 case OPC_J
... OPC_JAL
: /* Jump */
19764 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19765 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19768 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19769 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19771 generate_exception_end(ctx
, EXCP_RI
);
19774 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19775 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19778 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19781 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19782 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19784 generate_exception_end(ctx
, EXCP_RI
);
19787 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19788 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19791 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19794 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19797 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19799 check_insn(ctx
, ISA_MIPS32R6
);
19800 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19801 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19804 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19807 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19809 check_insn(ctx
, ISA_MIPS32R6
);
19810 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19811 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19816 check_insn(ctx
, ISA_MIPS2
);
19817 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19821 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19823 case OPC_LL
: /* Load and stores */
19824 check_insn(ctx
, ISA_MIPS2
);
19828 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19830 case OPC_LB
... OPC_LH
:
19831 case OPC_LW
... OPC_LHU
:
19832 gen_ld(ctx
, op
, rt
, rs
, imm
);
19836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19838 case OPC_SB
... OPC_SH
:
19840 gen_st(ctx
, op
, rt
, rs
, imm
);
19843 check_insn(ctx
, ISA_MIPS2
);
19844 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19845 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19848 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19849 check_cp0_enabled(ctx
);
19850 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19851 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19852 gen_cache_operation(ctx
, rt
, rs
, imm
);
19854 /* Treat as NOP. */
19857 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19858 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19859 /* Treat as NOP. */
19862 /* Floating point (COP1). */
19867 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19871 op1
= MASK_CP1(ctx
->opcode
);
19876 check_cp1_enabled(ctx
);
19877 check_insn(ctx
, ISA_MIPS32R2
);
19882 check_cp1_enabled(ctx
);
19883 gen_cp1(ctx
, op1
, rt
, rd
);
19885 #if defined(TARGET_MIPS64)
19888 check_cp1_enabled(ctx
);
19889 check_insn(ctx
, ISA_MIPS3
);
19890 check_mips_64(ctx
);
19891 gen_cp1(ctx
, op1
, rt
, rd
);
19894 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19895 check_cp1_enabled(ctx
);
19896 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19898 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19903 check_insn(ctx
, ASE_MIPS3D
);
19904 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19905 (rt
>> 2) & 0x7, imm
<< 2);
19909 check_cp1_enabled(ctx
);
19910 check_insn(ctx
, ISA_MIPS32R6
);
19911 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19915 check_cp1_enabled(ctx
);
19916 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19918 check_insn(ctx
, ASE_MIPS3D
);
19921 check_cp1_enabled(ctx
);
19922 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19923 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19924 (rt
>> 2) & 0x7, imm
<< 2);
19931 check_cp1_enabled(ctx
);
19932 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19938 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19939 check_cp1_enabled(ctx
);
19940 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19942 case R6_OPC_CMP_AF_S
:
19943 case R6_OPC_CMP_UN_S
:
19944 case R6_OPC_CMP_EQ_S
:
19945 case R6_OPC_CMP_UEQ_S
:
19946 case R6_OPC_CMP_LT_S
:
19947 case R6_OPC_CMP_ULT_S
:
19948 case R6_OPC_CMP_LE_S
:
19949 case R6_OPC_CMP_ULE_S
:
19950 case R6_OPC_CMP_SAF_S
:
19951 case R6_OPC_CMP_SUN_S
:
19952 case R6_OPC_CMP_SEQ_S
:
19953 case R6_OPC_CMP_SEUQ_S
:
19954 case R6_OPC_CMP_SLT_S
:
19955 case R6_OPC_CMP_SULT_S
:
19956 case R6_OPC_CMP_SLE_S
:
19957 case R6_OPC_CMP_SULE_S
:
19958 case R6_OPC_CMP_OR_S
:
19959 case R6_OPC_CMP_UNE_S
:
19960 case R6_OPC_CMP_NE_S
:
19961 case R6_OPC_CMP_SOR_S
:
19962 case R6_OPC_CMP_SUNE_S
:
19963 case R6_OPC_CMP_SNE_S
:
19964 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19966 case R6_OPC_CMP_AF_D
:
19967 case R6_OPC_CMP_UN_D
:
19968 case R6_OPC_CMP_EQ_D
:
19969 case R6_OPC_CMP_UEQ_D
:
19970 case R6_OPC_CMP_LT_D
:
19971 case R6_OPC_CMP_ULT_D
:
19972 case R6_OPC_CMP_LE_D
:
19973 case R6_OPC_CMP_ULE_D
:
19974 case R6_OPC_CMP_SAF_D
:
19975 case R6_OPC_CMP_SUN_D
:
19976 case R6_OPC_CMP_SEQ_D
:
19977 case R6_OPC_CMP_SEUQ_D
:
19978 case R6_OPC_CMP_SLT_D
:
19979 case R6_OPC_CMP_SULT_D
:
19980 case R6_OPC_CMP_SLE_D
:
19981 case R6_OPC_CMP_SULE_D
:
19982 case R6_OPC_CMP_OR_D
:
19983 case R6_OPC_CMP_UNE_D
:
19984 case R6_OPC_CMP_NE_D
:
19985 case R6_OPC_CMP_SOR_D
:
19986 case R6_OPC_CMP_SUNE_D
:
19987 case R6_OPC_CMP_SNE_D
:
19988 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19991 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19992 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19997 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
20012 check_insn(ctx
, ASE_MSA
);
20013 gen_msa_branch(env
, ctx
, op1
);
20017 generate_exception_end(ctx
, EXCP_RI
);
20022 /* Compact branches [R6] and COP2 [non-R6] */
20023 case OPC_BC
: /* OPC_LWC2 */
20024 case OPC_BALC
: /* OPC_SWC2 */
20025 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20026 /* OPC_BC, OPC_BALC */
20027 gen_compute_compact_branch(ctx
, op
, 0, 0,
20028 sextract32(ctx
->opcode
<< 2, 0, 28));
20030 /* OPC_LWC2, OPC_SWC2 */
20031 /* COP2: Not implemented. */
20032 generate_exception_err(ctx
, EXCP_CpU
, 2);
20035 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
20036 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
20037 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20039 /* OPC_BEQZC, OPC_BNEZC */
20040 gen_compute_compact_branch(ctx
, op
, rs
, 0,
20041 sextract32(ctx
->opcode
<< 2, 0, 23));
20043 /* OPC_JIC, OPC_JIALC */
20044 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
20047 /* OPC_LWC2, OPC_SWC2 */
20048 /* COP2: Not implemented. */
20049 generate_exception_err(ctx
, EXCP_CpU
, 2);
20053 check_insn(ctx
, INSN_LOONGSON2F
);
20054 /* Note that these instructions use different fields. */
20055 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
20059 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20060 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20061 check_cp1_enabled(ctx
);
20062 op1
= MASK_CP3(ctx
->opcode
);
20066 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20072 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20073 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
20076 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20077 /* Treat as NOP. */
20080 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20094 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20095 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
20099 generate_exception_end(ctx
, EXCP_RI
);
20103 generate_exception_err(ctx
, EXCP_CpU
, 1);
20107 #if defined(TARGET_MIPS64)
20108 /* MIPS64 opcodes */
20109 case OPC_LDL
... OPC_LDR
:
20111 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20115 check_insn(ctx
, ISA_MIPS3
);
20116 check_mips_64(ctx
);
20117 gen_ld(ctx
, op
, rt
, rs
, imm
);
20119 case OPC_SDL
... OPC_SDR
:
20120 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20123 check_insn(ctx
, ISA_MIPS3
);
20124 check_mips_64(ctx
);
20125 gen_st(ctx
, op
, rt
, rs
, imm
);
20128 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20129 check_insn(ctx
, ISA_MIPS3
);
20130 check_mips_64(ctx
);
20131 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
20133 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
20134 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20135 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
20136 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20139 check_insn(ctx
, ISA_MIPS3
);
20140 check_mips_64(ctx
);
20141 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20145 check_insn(ctx
, ISA_MIPS3
);
20146 check_mips_64(ctx
);
20147 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20150 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
20151 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20152 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20154 MIPS_INVAL("major opcode");
20155 generate_exception_end(ctx
, EXCP_RI
);
20159 case OPC_DAUI
: /* OPC_JALX */
20160 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20161 #if defined(TARGET_MIPS64)
20163 check_mips_64(ctx
);
20165 generate_exception(ctx
, EXCP_RI
);
20166 } else if (rt
!= 0) {
20167 TCGv t0
= tcg_temp_new();
20168 gen_load_gpr(t0
, rs
);
20169 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
20173 generate_exception_end(ctx
, EXCP_RI
);
20174 MIPS_INVAL("major opcode");
20178 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
20179 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
20180 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
20183 case OPC_MSA
: /* OPC_MDMX */
20184 /* MDMX: Not implemented. */
20188 check_insn(ctx
, ISA_MIPS32R6
);
20189 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
20191 default: /* Invalid */
20192 MIPS_INVAL("major opcode");
20193 generate_exception_end(ctx
, EXCP_RI
);
20198 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
20200 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20201 CPUMIPSState
*env
= cs
->env_ptr
;
20203 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
20204 ctx
->saved_pc
= -1;
20205 ctx
->insn_flags
= env
->insn_flags
;
20206 ctx
->CP0_Config1
= env
->CP0_Config1
;
20208 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
20209 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
20210 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
20211 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
20212 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
20213 ctx
->PAMask
= env
->PAMask
;
20214 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
20215 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
20216 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
20217 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
20218 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
20219 /* Restore delay slot state from the tb context. */
20220 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
20221 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
20222 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
20223 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
20224 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
20225 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
20226 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
20227 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
20228 restore_cpu_state(env
, ctx
);
20229 #ifdef CONFIG_USER_ONLY
20230 ctx
->mem_idx
= MIPS_HFLAG_UM
;
20232 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
20234 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
20235 MO_UNALN
: MO_ALIGN
;
20237 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
20241 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
20245 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
20247 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20249 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
20253 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
20254 const CPUBreakpoint
*bp
)
20256 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20258 save_cpu_state(ctx
, 1);
20259 ctx
->base
.is_jmp
= DISAS_NORETURN
;
20260 gen_helper_raise_exception_debug(cpu_env
);
20261 /* The address covered by the breakpoint must be included in
20262 [tb->pc, tb->pc + tb->size) in order to for it to be
20263 properly cleared -- thus we increment the PC here so that
20264 the logic setting tb->size below does the right thing. */
20265 ctx
->base
.pc_next
+= 4;
20269 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
20271 CPUMIPSState
*env
= cs
->env_ptr
;
20272 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20276 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
20277 if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
20278 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
20280 decode_opc(env
, ctx
);
20281 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
20282 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
20283 insn_bytes
= decode_micromips_opc(env
, ctx
);
20284 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
20285 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
20286 insn_bytes
= decode_mips16_opc(env
, ctx
);
20288 generate_exception_end(ctx
, EXCP_RI
);
20289 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
20293 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
20294 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
20295 MIPS_HFLAG_FBNSLOT
))) {
20296 /* force to generate branch as there is neither delay nor
20300 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
20301 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
20302 /* Force to generate branch as microMIPS R6 doesn't restrict
20303 branches in the forbidden slot. */
20308 gen_branch(ctx
, insn_bytes
);
20310 ctx
->base
.pc_next
+= insn_bytes
;
20312 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
20315 /* Execute a branch and its delay slot as a single instruction.
20316 This is what GDB expects and is consistent with what the
20317 hardware does (e.g. if a delay slot instruction faults, the
20318 reported PC is the PC of the branch). */
20319 if (ctx
->base
.singlestep_enabled
&&
20320 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
20321 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
20323 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
20324 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
20328 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
20330 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20332 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
20333 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
20334 gen_helper_raise_exception_debug(cpu_env
);
20336 switch (ctx
->base
.is_jmp
) {
20338 gen_save_pc(ctx
->base
.pc_next
);
20339 tcg_gen_lookup_and_goto_ptr();
20342 case DISAS_TOO_MANY
:
20343 save_cpu_state(ctx
, 0);
20344 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
20347 tcg_gen_exit_tb(NULL
, 0);
20349 case DISAS_NORETURN
:
20352 g_assert_not_reached();
20357 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
20359 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
20360 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
20363 static const TranslatorOps mips_tr_ops
= {
20364 .init_disas_context
= mips_tr_init_disas_context
,
20365 .tb_start
= mips_tr_tb_start
,
20366 .insn_start
= mips_tr_insn_start
,
20367 .breakpoint_check
= mips_tr_breakpoint_check
,
20368 .translate_insn
= mips_tr_translate_insn
,
20369 .tb_stop
= mips_tr_tb_stop
,
20370 .disas_log
= mips_tr_disas_log
,
20373 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
20377 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
20380 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
20384 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
20386 #define printfpr(fp) \
20389 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20390 " fd:%13g fs:%13g psu: %13g\n", \
20391 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
20392 (double)(fp)->fd, \
20393 (double)(fp)->fs[FP_ENDIAN_IDX], \
20394 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
20397 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
20398 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
20399 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20400 " fd:%13g fs:%13g psu:%13g\n", \
20401 tmp.w[FP_ENDIAN_IDX], tmp.d, \
20403 (double)tmp.fs[FP_ENDIAN_IDX], \
20404 (double)tmp.fs[!FP_ENDIAN_IDX]); \
20409 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
20410 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
20411 get_float_exception_flags(&env
->active_fpu
.fp_status
));
20412 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
20413 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
20414 printfpr(&env
->active_fpu
.fpr
[i
]);
20420 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
20423 MIPSCPU
*cpu
= MIPS_CPU(cs
);
20424 CPUMIPSState
*env
= &cpu
->env
;
20427 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
20428 " LO=0x" TARGET_FMT_lx
" ds %04x "
20429 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
20430 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
20431 env
->hflags
, env
->btarget
, env
->bcond
);
20432 for (i
= 0; i
< 32; i
++) {
20434 cpu_fprintf(f
, "GPR%02d:", i
);
20435 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
20437 cpu_fprintf(f
, "\n");
20440 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
20441 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
20442 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
20444 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
20445 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
20446 env
->CP0_Config2
, env
->CP0_Config3
);
20447 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
20448 env
->CP0_Config4
, env
->CP0_Config5
);
20449 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
20450 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
20454 void mips_tcg_init(void)
20459 for (i
= 1; i
< 32; i
++)
20460 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
20461 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
20464 for (i
= 0; i
< 32; i
++) {
20465 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
20467 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
20468 /* The scalar floating-point unit (FPU) registers are mapped on
20469 * the MSA vector registers. */
20470 fpu_f64
[i
] = msa_wr_d
[i
* 2];
20471 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
20472 msa_wr_d
[i
* 2 + 1] =
20473 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
20476 cpu_PC
= tcg_global_mem_new(cpu_env
,
20477 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
20478 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
20479 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
20480 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
20482 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
20483 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
20486 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
20487 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
20489 bcond
= tcg_global_mem_new(cpu_env
,
20490 offsetof(CPUMIPSState
, bcond
), "bcond");
20491 btarget
= tcg_global_mem_new(cpu_env
,
20492 offsetof(CPUMIPSState
, btarget
), "btarget");
20493 hflags
= tcg_global_mem_new_i32(cpu_env
,
20494 offsetof(CPUMIPSState
, hflags
), "hflags");
20496 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
20497 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
20499 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
20500 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
20504 #include "translate_init.inc.c"
20506 void cpu_mips_realize_env(CPUMIPSState
*env
)
20508 env
->exception_base
= (int32_t)0xBFC00000;
20510 #ifndef CONFIG_USER_ONLY
20511 mmu_init(env
, env
->cpu_model
);
20513 fpu_init(env
, env
->cpu_model
);
20514 mvp_init(env
, env
->cpu_model
);
20517 bool cpu_supports_cps_smp(const char *cpu_type
)
20519 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20520 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
20523 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
20525 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20526 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
20529 void cpu_set_exception_base(int vp_index
, target_ulong address
)
20531 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20532 vp
->env
.exception_base
= address
;
20535 void cpu_state_reset(CPUMIPSState
*env
)
20537 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
20538 CPUState
*cs
= CPU(cpu
);
20540 /* Reset registers to their default values */
20541 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
20542 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
20543 #ifdef TARGET_WORDS_BIGENDIAN
20544 env
->CP0_Config0
|= (1 << CP0C0_BE
);
20546 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
20547 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
20548 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
20549 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
20550 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
20551 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
20552 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
20553 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
20554 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20555 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20556 << env
->cpu_model
->CP0_LLAddr_shift
;
20557 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20558 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20559 env
->CCRes
= env
->cpu_model
->CCRes
;
20560 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20561 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20562 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20563 env
->current_tc
= 0;
20564 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20565 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20566 #if defined(TARGET_MIPS64)
20567 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20568 env
->SEGMask
|= 3ULL << 62;
20571 env
->PABITS
= env
->cpu_model
->PABITS
;
20572 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20573 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20574 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20575 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20576 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20577 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20578 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20579 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20580 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20581 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20582 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20583 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20584 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
20585 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20586 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
20587 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20588 env
->msair
= env
->cpu_model
->MSAIR
;
20589 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20591 #if defined(CONFIG_USER_ONLY)
20592 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20593 # ifdef TARGET_MIPS64
20594 /* Enable 64-bit register mode. */
20595 env
->CP0_Status
|= (1 << CP0St_PX
);
20597 # ifdef TARGET_ABI_MIPSN64
20598 /* Enable 64-bit address mode. */
20599 env
->CP0_Status
|= (1 << CP0St_UX
);
20601 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20602 hardware registers. */
20603 env
->CP0_HWREna
|= 0x0000000F;
20604 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20605 env
->CP0_Status
|= (1 << CP0St_CU1
);
20607 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20608 env
->CP0_Status
|= (1 << CP0St_MX
);
20610 # if defined(TARGET_MIPS64)
20611 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20612 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20613 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20614 env
->CP0_Status
|= (1 << CP0St_FR
);
20618 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20619 /* If the exception was raised from a delay slot,
20620 come back to the jump. */
20621 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20622 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20624 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20626 env
->active_tc
.PC
= env
->exception_base
;
20627 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20628 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20629 env
->CP0_Wired
= 0;
20630 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20631 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20632 if (mips_um_ksegs_enabled()) {
20633 env
->CP0_EBase
|= 0x40000000;
20635 env
->CP0_EBase
|= (int32_t)0x80000000;
20637 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20638 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20640 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
20642 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20643 /* vectored interrupts not implemented, timer on int 7,
20644 no performance counters. */
20645 env
->CP0_IntCtl
= 0xe0000000;
20649 for (i
= 0; i
< 7; i
++) {
20650 env
->CP0_WatchLo
[i
] = 0;
20651 env
->CP0_WatchHi
[i
] = 0x80000000;
20653 env
->CP0_WatchLo
[7] = 0;
20654 env
->CP0_WatchHi
[7] = 0;
20656 /* Count register increments in debug mode, EJTAG version 1 */
20657 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20659 cpu_mips_store_count(env
, 1);
20661 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20664 /* Only TC0 on VPE 0 starts as active. */
20665 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20666 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20667 env
->tcs
[i
].CP0_TCHalt
= 1;
20669 env
->active_tc
.CP0_TCHalt
= 1;
20672 if (cs
->cpu_index
== 0) {
20673 /* VPE0 starts up enabled. */
20674 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20675 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20677 /* TC0 starts up unhalted. */
20679 env
->active_tc
.CP0_TCHalt
= 0;
20680 env
->tcs
[0].CP0_TCHalt
= 0;
20681 /* With thread 0 active. */
20682 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20683 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20688 * Configure default legacy segmentation control. We use this regardless of
20689 * whether segmentation control is presented to the guest.
20691 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
20692 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
20693 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
20694 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
20695 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
20696 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20698 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
20699 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20700 (3 << CP0SC_C
)) << 16;
20701 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
20702 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20703 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
20704 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
20705 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20706 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
20707 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
20708 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
20710 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20711 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20712 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20713 env
->CP0_Status
|= (1 << CP0St_FR
);
20717 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20721 compute_hflags(env
);
20722 restore_fp_status(env
);
20723 restore_pamask(env
);
20724 cs
->exception_index
= EXCP_NONE
;
20726 if (semihosting_get_argc()) {
20727 /* UHI interface can be used to obtain argc and argv */
20728 env
->active_tc
.gpr
[4] = -1;
20732 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20733 target_ulong
*data
)
20735 env
->active_tc
.PC
= data
[0];
20736 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20737 env
->hflags
|= data
[1];
20738 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20739 case MIPS_HFLAG_BR
:
20741 case MIPS_HFLAG_BC
:
20742 case MIPS_HFLAG_BL
:
20744 env
->btarget
= data
[2];