/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
#define MIPS_DEBUG_DISAS 0

/*
 * MIPS major opcodes: bits 31..26 of the instruction word.
 * MASK_OP_MAJOR extracts that field so an opcode word can be compared
 * against the OPC_* constants below.
 */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables (decoded further by a function field) */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate (64-bit) */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),  /* same major opcode as JALX */
    /* Loads and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches (several aliases share a major opcode and are
     * disambiguated by the register fields) */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};

/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),
    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),
    /* determined by bits 18 and 19 */
    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
467 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
468 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
472 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
475 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
476 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
477 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
478 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
479 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
485 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
492 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
493 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
494 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
505 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
506 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
523 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
582 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
583 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
666 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
667 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
685 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
686 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
687 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
688 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
689 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
790 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
791 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
792 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
889 /* Coprocessor 0 (rs field) */
890 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
893 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
894 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
895 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
896 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
897 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
898 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
899 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
900 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
901 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
902 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
903 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
904 OPC_C0
= (0x10 << 21) | OPC_CP0
,
905 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
906 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
910 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
913 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
914 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
915 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
916 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
917 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
918 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
919 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
920 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
923 /* Coprocessor 0 (with rs == C0) */
924 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
927 OPC_TLBR
= 0x01 | OPC_C0
,
928 OPC_TLBWI
= 0x02 | OPC_C0
,
929 OPC_TLBINV
= 0x03 | OPC_C0
,
930 OPC_TLBINVF
= 0x04 | OPC_C0
,
931 OPC_TLBWR
= 0x06 | OPC_C0
,
932 OPC_TLBP
= 0x08 | OPC_C0
,
933 OPC_RFE
= 0x10 | OPC_C0
,
934 OPC_ERET
= 0x18 | OPC_C0
,
935 OPC_DERET
= 0x1F | OPC_C0
,
936 OPC_WAIT
= 0x20 | OPC_C0
,
939 /* Coprocessor 1 (rs field) */
940 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
942 /* Values for the fmt field in FP instructions */
944 /* 0 - 15 are reserved */
945 FMT_S
= 16, /* single fp */
946 FMT_D
= 17, /* double fp */
947 FMT_E
= 18, /* extended fp */
948 FMT_Q
= 19, /* quad fp */
949 FMT_W
= 20, /* 32-bit fixed */
950 FMT_L
= 21, /* 64-bit fixed */
951 FMT_PS
= 22, /* paired single fp */
952 /* 23 - 31 are reserved */
956 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
957 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
958 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
959 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
960 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
961 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
962 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
963 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
964 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
965 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
966 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
967 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
968 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
969 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
970 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
971 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
972 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
973 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
974 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
975 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
976 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
977 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
978 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
979 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
980 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
981 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
982 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
983 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
984 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
985 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
988 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
989 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
992 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
993 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
994 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
995 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
999 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1000 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1004 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1005 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1008 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1011 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1012 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1013 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1014 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1015 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1016 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1017 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1018 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1019 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1020 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1021 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1024 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1027 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1028 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1029 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1030 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1031 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1032 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1033 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1034 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1036 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1037 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1038 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1039 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1040 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1041 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1042 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1043 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1045 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1046 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1047 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1048 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1049 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1050 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1051 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1052 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1054 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1055 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1056 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1057 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1058 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1059 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1060 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1061 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1063 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1064 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1065 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1066 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1067 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1068 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1070 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1071 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1072 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1073 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1074 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1075 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1077 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1078 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1079 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1080 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1081 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1082 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1084 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1085 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1086 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1087 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1088 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1089 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1091 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1092 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1093 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1094 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1095 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1096 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1098 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1099 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1100 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1101 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1102 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1103 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1105 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1106 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1107 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1108 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1109 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1110 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1112 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1113 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1114 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1115 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1116 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1117 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1121 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1124 OPC_LWXC1
= 0x00 | OPC_CP3
,
1125 OPC_LDXC1
= 0x01 | OPC_CP3
,
1126 OPC_LUXC1
= 0x05 | OPC_CP3
,
1127 OPC_SWXC1
= 0x08 | OPC_CP3
,
1128 OPC_SDXC1
= 0x09 | OPC_CP3
,
1129 OPC_SUXC1
= 0x0D | OPC_CP3
,
1130 OPC_PREFX
= 0x0F | OPC_CP3
,
1131 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1132 OPC_MADD_S
= 0x20 | OPC_CP3
,
1133 OPC_MADD_D
= 0x21 | OPC_CP3
,
1134 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1135 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1136 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1137 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1138 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1139 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1140 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1141 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1142 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1143 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1147 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1149 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1150 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1151 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1152 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1153 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1154 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1155 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1156 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1157 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1158 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1159 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1160 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1161 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1162 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1163 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1164 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1165 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1166 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1167 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1168 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1169 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1171 /* MI10 instruction */
1172 OPC_LD_B
= (0x20) | OPC_MSA
,
1173 OPC_LD_H
= (0x21) | OPC_MSA
,
1174 OPC_LD_W
= (0x22) | OPC_MSA
,
1175 OPC_LD_D
= (0x23) | OPC_MSA
,
1176 OPC_ST_B
= (0x24) | OPC_MSA
,
1177 OPC_ST_H
= (0x25) | OPC_MSA
,
1178 OPC_ST_W
= (0x26) | OPC_MSA
,
1179 OPC_ST_D
= (0x27) | OPC_MSA
,
1183 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1184 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1185 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1186 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1187 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1188 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1189 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1190 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1191 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1192 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1193 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1194 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1195 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1197 /* I8 instruction */
1198 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1199 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1200 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1201 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1202 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1203 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1204 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1205 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1206 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1207 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1209 /* VEC/2R/2RF instruction */
1210 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1211 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1212 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1213 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1214 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1215 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1216 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1218 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1219 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1221 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1222 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1223 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1224 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1225 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1227 /* 2RF instruction df(bit 16) = _w, _d */
1228 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1229 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1230 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1231 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1232 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1233 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1234 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1235 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1236 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1237 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1238 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1239 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1240 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1241 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1242 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1243 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1245 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1246 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1247 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1248 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1249 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1250 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1251 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1252 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1253 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1254 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1255 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1256 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1257 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1258 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1259 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1260 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1261 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1262 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1263 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1264 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1265 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1266 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1267 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1268 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1269 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1270 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1271 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1272 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1273 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1274 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1275 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1276 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1277 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1278 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1279 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1280 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1281 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1282 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1283 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1284 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1285 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1286 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1287 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1288 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1289 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1290 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1291 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1292 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1293 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1294 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1295 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1296 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1297 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1298 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1299 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1300 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1301 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1302 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1303 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1304 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1305 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1306 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1307 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1308 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1310 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1311 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1312 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1313 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1314 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1315 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1316 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1317 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1318 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1319 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1321 /* 3RF instruction _df(bit 21) = _w, _d */
1322 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1323 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1324 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1325 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1326 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1327 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1328 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1329 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1332 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1338 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1340 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1341 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1342 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1343 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1347 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1348 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1349 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1350 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1351 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1353 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1354 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1355 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1356 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1357 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1358 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1360 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1361 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1362 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1364 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1365 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1366 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1367 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1368 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1369 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1370 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1371 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1372 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1373 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1374 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1375 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1376 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1379 /* global register indices */
1380 static TCGv cpu_gpr
[32], cpu_PC
;
1381 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1382 static TCGv cpu_dspctrl
, btarget
, bcond
;
1383 static TCGv_i32 hflags
;
1384 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1385 static TCGv_i64 fpu_f64
[32];
1386 static TCGv_i64 msa_wr_d
[64];
1388 #include "exec/gen-icount.h"
1390 #define gen_helper_0e0i(name, arg) do { \
1391 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
1392 gen_helper_##name(cpu_env, helper_tmp); \
1393 tcg_temp_free_i32(helper_tmp); \
1396 #define gen_helper_0e1i(name, arg1, arg2) do { \
1397 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1398 gen_helper_##name(cpu_env, arg1, helper_tmp); \
1399 tcg_temp_free_i32(helper_tmp); \
1402 #define gen_helper_1e0i(name, ret, arg1) do { \
1403 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
1404 gen_helper_##name(ret, cpu_env, helper_tmp); \
1405 tcg_temp_free_i32(helper_tmp); \
1408 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
1409 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1410 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
1411 tcg_temp_free_i32(helper_tmp); \
1414 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
1415 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1416 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
1417 tcg_temp_free_i32(helper_tmp); \
1420 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
1421 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1422 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
1423 tcg_temp_free_i32(helper_tmp); \
1426 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
1427 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
1428 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
1429 tcg_temp_free_i32(helper_tmp); \
1432 typedef struct DisasContext
{
1433 DisasContextBase base
;
1434 target_ulong saved_pc
;
1435 target_ulong page_start
;
1438 int32_t CP0_Config1
;
1439 /* Routine used to access memory */
1441 TCGMemOp default_tcg_memop_mask
;
1442 uint32_t hflags
, saved_hflags
;
1443 target_ulong btarget
;
1454 int CP0_LLAddr_shift
;
1463 #define DISAS_STOP DISAS_TARGET_0
1464 #define DISAS_EXIT DISAS_TARGET_1
/* Symbolic (o32 ABI) names of the 32 general purpose registers. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the four HI accumulators (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the four LO accumulators (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the 32 MSA vector registers, each split into two 64-bit halves. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
1507 #define LOG_DISAS(...) \
1509 if (MIPS_DEBUG_DISAS) { \
1510 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
1514 #define MIPS_INVAL(op) \
1516 if (MIPS_DEBUG_DISAS) { \
1517 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
1518 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
1519 ctx->base.pc_next, ctx->opcode, op, \
1520 ctx->opcode >> 26, ctx->opcode & 0x3F, \
1521 ((ctx->opcode >> 16) & 0x1F)); \
1525 /* General purpose registers moves. */
1526 static inline void gen_load_gpr (TCGv t
, int reg
)
1529 tcg_gen_movi_tl(t
, 0);
1531 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1534 static inline void gen_store_gpr (TCGv t
, int reg
)
1537 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1540 /* Moves to/from shadow registers. */
1541 static inline void gen_load_srsgpr (int from
, int to
)
1543 TCGv t0
= tcg_temp_new();
1546 tcg_gen_movi_tl(t0
, 0);
1548 TCGv_i32 t2
= tcg_temp_new_i32();
1549 TCGv_ptr addr
= tcg_temp_new_ptr();
1551 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1552 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1553 tcg_gen_andi_i32(t2
, t2
, 0xf);
1554 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1555 tcg_gen_ext_i32_ptr(addr
, t2
);
1556 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1558 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1559 tcg_temp_free_ptr(addr
);
1560 tcg_temp_free_i32(t2
);
1562 gen_store_gpr(t0
, to
);
1566 static inline void gen_store_srsgpr (int from
, int to
)
1569 TCGv t0
= tcg_temp_new();
1570 TCGv_i32 t2
= tcg_temp_new_i32();
1571 TCGv_ptr addr
= tcg_temp_new_ptr();
1573 gen_load_gpr(t0
, from
);
1574 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1575 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1576 tcg_gen_andi_i32(t2
, t2
, 0xf);
1577 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1578 tcg_gen_ext_i32_ptr(addr
, t2
);
1579 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1581 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1582 tcg_temp_free_ptr(addr
);
1583 tcg_temp_free_i32(t2
);
1589 static inline void gen_save_pc(target_ulong pc
)
1591 tcg_gen_movi_tl(cpu_PC
, pc
);
1594 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1596 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1597 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1598 gen_save_pc(ctx
->base
.pc_next
);
1599 ctx
->saved_pc
= ctx
->base
.pc_next
;
1601 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1602 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1603 ctx
->saved_hflags
= ctx
->hflags
;
1604 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1610 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1616 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1618 ctx
->saved_hflags
= ctx
->hflags
;
1619 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1625 ctx
->btarget
= env
->btarget
;
1630 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1632 TCGv_i32 texcp
= tcg_const_i32(excp
);
1633 TCGv_i32 terr
= tcg_const_i32(err
);
1634 save_cpu_state(ctx
, 1);
1635 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1636 tcg_temp_free_i32(terr
);
1637 tcg_temp_free_i32(texcp
);
1638 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1641 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1643 gen_helper_0e0i(raise_exception
, excp
);
1646 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1648 generate_exception_err(ctx
, excp
, 0);
1651 /* Floating point register moves. */
1652 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1654 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1655 generate_exception(ctx
, EXCP_RI
);
1657 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1660 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1663 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1664 generate_exception(ctx
, EXCP_RI
);
1666 t64
= tcg_temp_new_i64();
1667 tcg_gen_extu_i32_i64(t64
, t
);
1668 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1669 tcg_temp_free_i64(t64
);
1672 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1674 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1675 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1677 gen_load_fpr32(ctx
, t
, reg
| 1);
1681 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1683 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1684 TCGv_i64 t64
= tcg_temp_new_i64();
1685 tcg_gen_extu_i32_i64(t64
, t
);
1686 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1687 tcg_temp_free_i64(t64
);
1689 gen_store_fpr32(ctx
, t
, reg
| 1);
1693 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1695 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1696 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1698 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1702 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1704 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1705 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1708 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1709 t0
= tcg_temp_new_i64();
1710 tcg_gen_shri_i64(t0
, t
, 32);
1711 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1712 tcg_temp_free_i64(t0
);
1716 static inline int get_fp_bit (int cc
)
1724 /* Addresses computation */
1725 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1727 tcg_gen_add_tl(ret
, arg0
, arg1
);
1729 #if defined(TARGET_MIPS64)
1730 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1731 tcg_gen_ext32s_i64(ret
, ret
);
1736 /* Addresses computation (translation time) */
1737 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1740 target_long sum
= base
+ offset
;
1742 #if defined(TARGET_MIPS64)
1743 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1750 /* Sign-extract the low 32-bits to a target_long. */
1751 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1753 #if defined(TARGET_MIPS64)
1754 tcg_gen_ext32s_i64(ret
, arg
);
1756 tcg_gen_extrl_i64_i32(ret
, arg
);
1760 /* Sign-extract the high 32-bits to a target_long. */
1761 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1763 #if defined(TARGET_MIPS64)
1764 tcg_gen_sari_i64(ret
, arg
, 32);
1766 tcg_gen_extrh_i64_i32(ret
, arg
);
1770 static inline void check_cp0_enabled(DisasContext
*ctx
)
1772 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1773 generate_exception_err(ctx
, EXCP_CpU
, 0);
1776 static inline void check_cp1_enabled(DisasContext
*ctx
)
1778 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1779 generate_exception_err(ctx
, EXCP_CpU
, 1);
1782 /* Verify that the processor is running with COP1X instructions enabled.
1783 This is associated with the nabla symbol in the MIPS32 and MIPS64
1786 static inline void check_cop1x(DisasContext
*ctx
)
1788 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1789 generate_exception_end(ctx
, EXCP_RI
);
1792 /* Verify that the processor is running with 64-bit floating-point
1793 operations enabled. */
1795 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1797 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1798 generate_exception_end(ctx
, EXCP_RI
);
1802 * Verify if floating point register is valid; an operation is not defined
1803 * if bit 0 of any register specification is set and the FR bit in the
1804 * Status register equals zero, since the register numbers specify an
1805 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1806 * in the Status register equals one, both even and odd register numbers
1807 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1809 * Multiple 64 bit wide registers can be checked by calling
1810 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1812 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1814 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1815 generate_exception_end(ctx
, EXCP_RI
);
1818 /* Verify that the processor is running with DSP instructions enabled.
1819 This is enabled by CP0 Status register MX(24) bit.
1822 static inline void check_dsp(DisasContext
*ctx
)
1824 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1825 if (ctx
->insn_flags
& ASE_DSP
) {
1826 generate_exception_end(ctx
, EXCP_DSPDIS
);
1828 generate_exception_end(ctx
, EXCP_RI
);
1833 static inline void check_dspr2(DisasContext
*ctx
)
1835 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1836 if (ctx
->insn_flags
& ASE_DSP
) {
1837 generate_exception_end(ctx
, EXCP_DSPDIS
);
1839 generate_exception_end(ctx
, EXCP_RI
);
1844 /* This code generates a "reserved instruction" exception if the
1845 CPU does not support the instruction set corresponding to flags. */
1846 static inline void check_insn(DisasContext
*ctx
, int flags
)
1848 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1849 generate_exception_end(ctx
, EXCP_RI
);
1853 /* This code generates a "reserved instruction" exception if the
1854 CPU has corresponding flag set which indicates that the instruction
1855 has been removed. */
1856 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1858 if (unlikely(ctx
->insn_flags
& flags
)) {
1859 generate_exception_end(ctx
, EXCP_RI
);
1863 /* This code generates a "reserved instruction" exception if the
1864 CPU does not support 64-bit paired-single (PS) floating point data type */
1865 static inline void check_ps(DisasContext
*ctx
)
1867 if (unlikely(!ctx
->ps
)) {
1868 generate_exception(ctx
, EXCP_RI
);
1870 check_cp1_64bitmode(ctx
);
1873 #ifdef TARGET_MIPS64
1874 /* This code generates a "reserved instruction" exception if 64-bit
1875 instructions are not enabled. */
1876 static inline void check_mips_64(DisasContext
*ctx
)
1878 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1879 generate_exception_end(ctx
, EXCP_RI
);
1883 #ifndef CONFIG_USER_ONLY
1884 static inline void check_mvh(DisasContext
*ctx
)
1886 if (unlikely(!ctx
->mvh
)) {
1887 generate_exception(ctx
, EXCP_RI
);
1892 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1893 calling interface for 32 and 64-bit FPRs. No sense in changing
1894 all callers for gen_load_fpr32 when we need the CTX parameter for
1896 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1897 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1898 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1899 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1900 int ft, int fs, int cc) \
1902 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1903 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1912 check_cp1_registers(ctx, fs | ft); \
1920 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1921 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1923 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1924 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1925 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1926 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1927 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1928 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1929 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1930 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1931 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1932 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1933 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1934 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1935 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1936 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1937 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1938 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1941 tcg_temp_free_i##bits (fp0); \
1942 tcg_temp_free_i##bits (fp1); \
1945 FOP_CONDS(, 0, d
, FMT_D
, 64)
1946 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1947 FOP_CONDS(, 0, s
, FMT_S
, 32)
1948 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1949 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1950 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1953 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1954 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1955 int ft, int fs, int fd) \
1957 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1958 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1959 if (ifmt == FMT_D) { \
1960 check_cp1_registers(ctx, fs | ft | fd); \
1962 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1963 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1966 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1969 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1972 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1975 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1978 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1981 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1984 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1987 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1990 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1993 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1996 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1999 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2002 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2005 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2008 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2011 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2014 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2017 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2020 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2023 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2026 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2029 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2035 tcg_temp_free_i ## bits (fp0); \
2036 tcg_temp_free_i ## bits (fp1); \
2039 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2040 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2042 #undef gen_ldcmp_fpr32
2043 #undef gen_ldcmp_fpr64
2045 /* load/store instructions. */
2046 #ifdef CONFIG_USER_ONLY
2047 #define OP_LD_ATOMIC(insn,fname) \
2048 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2049 DisasContext *ctx) \
2051 TCGv t0 = tcg_temp_new(); \
2052 tcg_gen_mov_tl(t0, arg1); \
2053 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2054 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2055 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2056 tcg_temp_free(t0); \
2059 #define OP_LD_ATOMIC(insn,fname) \
2060 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2061 DisasContext *ctx) \
2063 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2066 OP_LD_ATOMIC(ll
,ld32s
);
2067 #if defined(TARGET_MIPS64)
2068 OP_LD_ATOMIC(lld
,ld64
);
2072 #ifdef CONFIG_USER_ONLY
2073 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2074 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2075 DisasContext *ctx) \
2077 TCGv t0 = tcg_temp_new(); \
2078 TCGLabel *l1 = gen_new_label(); \
2079 TCGLabel *l2 = gen_new_label(); \
2081 tcg_gen_andi_tl(t0, arg2, almask); \
2082 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2083 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2084 generate_exception(ctx, EXCP_AdES); \
2085 gen_set_label(l1); \
2086 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2087 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2088 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2089 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2090 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2091 generate_exception_end(ctx, EXCP_SC); \
2092 gen_set_label(l2); \
2093 tcg_gen_movi_tl(t0, 0); \
2094 gen_store_gpr(t0, rt); \
2095 tcg_temp_free(t0); \
2098 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2099 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2100 DisasContext *ctx) \
2102 TCGv t0 = tcg_temp_new(); \
2103 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2104 gen_store_gpr(t0, rt); \
2105 tcg_temp_free(t0); \
2108 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2109 #if defined(TARGET_MIPS64)
2110 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2114 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2115 int base
, int offset
)
2118 tcg_gen_movi_tl(addr
, offset
);
2119 } else if (offset
== 0) {
2120 gen_load_gpr(addr
, base
);
2122 tcg_gen_movi_tl(addr
, offset
);
2123 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2127 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2129 target_ulong pc
= ctx
->base
.pc_next
;
2131 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2132 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2137 pc
&= ~(target_ulong
)3;
2142 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2143 int rt
, int base
, int offset
)
2146 int mem_idx
= ctx
->mem_idx
;
2148 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2149 /* Loongson CPU uses a load to zero register for prefetch.
2150 We emulate it as a NOP. On other CPU we must perform the
2151 actual memory access. */
2155 t0
= tcg_temp_new();
2156 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2159 #if defined(TARGET_MIPS64)
2161 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2162 ctx
->default_tcg_memop_mask
);
2163 gen_store_gpr(t0
, rt
);
2166 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2167 ctx
->default_tcg_memop_mask
);
2168 gen_store_gpr(t0
, rt
);
2172 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2173 gen_store_gpr(t0
, rt
);
2176 t1
= tcg_temp_new();
2177 /* Do a byte access to possibly trigger a page
2178 fault with the unaligned address. */
2179 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2180 tcg_gen_andi_tl(t1
, t0
, 7);
2181 #ifndef TARGET_WORDS_BIGENDIAN
2182 tcg_gen_xori_tl(t1
, t1
, 7);
2184 tcg_gen_shli_tl(t1
, t1
, 3);
2185 tcg_gen_andi_tl(t0
, t0
, ~7);
2186 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2187 tcg_gen_shl_tl(t0
, t0
, t1
);
2188 t2
= tcg_const_tl(-1);
2189 tcg_gen_shl_tl(t2
, t2
, t1
);
2190 gen_load_gpr(t1
, rt
);
2191 tcg_gen_andc_tl(t1
, t1
, t2
);
2193 tcg_gen_or_tl(t0
, t0
, t1
);
2195 gen_store_gpr(t0
, rt
);
2198 t1
= tcg_temp_new();
2199 /* Do a byte access to possibly trigger a page
2200 fault with the unaligned address. */
2201 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2202 tcg_gen_andi_tl(t1
, t0
, 7);
2203 #ifdef TARGET_WORDS_BIGENDIAN
2204 tcg_gen_xori_tl(t1
, t1
, 7);
2206 tcg_gen_shli_tl(t1
, t1
, 3);
2207 tcg_gen_andi_tl(t0
, t0
, ~7);
2208 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2209 tcg_gen_shr_tl(t0
, t0
, t1
);
2210 tcg_gen_xori_tl(t1
, t1
, 63);
2211 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2212 tcg_gen_shl_tl(t2
, t2
, t1
);
2213 gen_load_gpr(t1
, rt
);
2214 tcg_gen_and_tl(t1
, t1
, t2
);
2216 tcg_gen_or_tl(t0
, t0
, t1
);
2218 gen_store_gpr(t0
, rt
);
2221 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2222 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2225 gen_store_gpr(t0
, rt
);
2229 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2230 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2232 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2233 gen_store_gpr(t0
, rt
);
2236 mem_idx
= MIPS_HFLAG_UM
;
2239 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2240 ctx
->default_tcg_memop_mask
);
2241 gen_store_gpr(t0
, rt
);
2244 mem_idx
= MIPS_HFLAG_UM
;
2247 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2248 ctx
->default_tcg_memop_mask
);
2249 gen_store_gpr(t0
, rt
);
2252 mem_idx
= MIPS_HFLAG_UM
;
2255 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2256 ctx
->default_tcg_memop_mask
);
2257 gen_store_gpr(t0
, rt
);
2260 mem_idx
= MIPS_HFLAG_UM
;
2263 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2264 gen_store_gpr(t0
, rt
);
2267 mem_idx
= MIPS_HFLAG_UM
;
2270 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2271 gen_store_gpr(t0
, rt
);
2274 mem_idx
= MIPS_HFLAG_UM
;
2277 t1
= tcg_temp_new();
2278 /* Do a byte access to possibly trigger a page
2279 fault with the unaligned address. */
2280 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2281 tcg_gen_andi_tl(t1
, t0
, 3);
2282 #ifndef TARGET_WORDS_BIGENDIAN
2283 tcg_gen_xori_tl(t1
, t1
, 3);
2285 tcg_gen_shli_tl(t1
, t1
, 3);
2286 tcg_gen_andi_tl(t0
, t0
, ~3);
2287 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2288 tcg_gen_shl_tl(t0
, t0
, t1
);
2289 t2
= tcg_const_tl(-1);
2290 tcg_gen_shl_tl(t2
, t2
, t1
);
2291 gen_load_gpr(t1
, rt
);
2292 tcg_gen_andc_tl(t1
, t1
, t2
);
2294 tcg_gen_or_tl(t0
, t0
, t1
);
2296 tcg_gen_ext32s_tl(t0
, t0
);
2297 gen_store_gpr(t0
, rt
);
2300 mem_idx
= MIPS_HFLAG_UM
;
2303 t1
= tcg_temp_new();
2304 /* Do a byte access to possibly trigger a page
2305 fault with the unaligned address. */
2306 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2307 tcg_gen_andi_tl(t1
, t0
, 3);
2308 #ifdef TARGET_WORDS_BIGENDIAN
2309 tcg_gen_xori_tl(t1
, t1
, 3);
2311 tcg_gen_shli_tl(t1
, t1
, 3);
2312 tcg_gen_andi_tl(t0
, t0
, ~3);
2313 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2314 tcg_gen_shr_tl(t0
, t0
, t1
);
2315 tcg_gen_xori_tl(t1
, t1
, 31);
2316 t2
= tcg_const_tl(0xfffffffeull
);
2317 tcg_gen_shl_tl(t2
, t2
, t1
);
2318 gen_load_gpr(t1
, rt
);
2319 tcg_gen_and_tl(t1
, t1
, t2
);
2321 tcg_gen_or_tl(t0
, t0
, t1
);
2323 tcg_gen_ext32s_tl(t0
, t0
);
2324 gen_store_gpr(t0
, rt
);
2327 mem_idx
= MIPS_HFLAG_UM
;
2331 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2332 gen_store_gpr(t0
, rt
);
2339 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2340 int base
, int offset
)
2342 TCGv t0
= tcg_temp_new();
2343 TCGv t1
= tcg_temp_new();
2344 int mem_idx
= ctx
->mem_idx
;
2346 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2347 gen_load_gpr(t1
, rt
);
2349 #if defined(TARGET_MIPS64)
2351 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2352 ctx
->default_tcg_memop_mask
);
2355 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2358 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2362 mem_idx
= MIPS_HFLAG_UM
;
2365 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2366 ctx
->default_tcg_memop_mask
);
2369 mem_idx
= MIPS_HFLAG_UM
;
2372 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2373 ctx
->default_tcg_memop_mask
);
2376 mem_idx
= MIPS_HFLAG_UM
;
2379 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2382 mem_idx
= MIPS_HFLAG_UM
;
2385 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2388 mem_idx
= MIPS_HFLAG_UM
;
2391 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2399 /* Store conditional */
2400 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2401 int base
, int16_t offset
)
2404 int mem_idx
= ctx
->mem_idx
;
2406 #ifdef CONFIG_USER_ONLY
2407 t0
= tcg_temp_local_new();
2408 t1
= tcg_temp_local_new();
2410 t0
= tcg_temp_new();
2411 t1
= tcg_temp_new();
2413 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2414 gen_load_gpr(t1
, rt
);
2416 #if defined(TARGET_MIPS64)
2419 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2423 mem_idx
= MIPS_HFLAG_UM
;
2427 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2434 /* Load and store */
2435 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2438 /* Don't do NOP if destination is zero: we must perform the actual
2443 TCGv_i32 fp0
= tcg_temp_new_i32();
2444 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2445 ctx
->default_tcg_memop_mask
);
2446 gen_store_fpr32(ctx
, fp0
, ft
);
2447 tcg_temp_free_i32(fp0
);
2452 TCGv_i32 fp0
= tcg_temp_new_i32();
2453 gen_load_fpr32(ctx
, fp0
, ft
);
2454 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2455 ctx
->default_tcg_memop_mask
);
2456 tcg_temp_free_i32(fp0
);
2461 TCGv_i64 fp0
= tcg_temp_new_i64();
2462 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2463 ctx
->default_tcg_memop_mask
);
2464 gen_store_fpr64(ctx
, fp0
, ft
);
2465 tcg_temp_free_i64(fp0
);
2470 TCGv_i64 fp0
= tcg_temp_new_i64();
2471 gen_load_fpr64(ctx
, fp0
, ft
);
2472 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2473 ctx
->default_tcg_memop_mask
);
2474 tcg_temp_free_i64(fp0
);
2478 MIPS_INVAL("flt_ldst");
2479 generate_exception_end(ctx
, EXCP_RI
);
2484 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2485 int rs
, int16_t imm
)
2487 TCGv t0
= tcg_temp_new();
2489 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2490 check_cp1_enabled(ctx
);
2494 check_insn(ctx
, ISA_MIPS2
);
2497 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2498 gen_flt_ldst(ctx
, op
, rt
, t0
);
2501 generate_exception_err(ctx
, EXCP_CpU
, 1);
2506 /* Arithmetic with immediate operand */
2507 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2508 int rt
, int rs
, int imm
)
2510 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2512 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2513 /* If no destination, treat it as a NOP.
2514 For addi, we must generate the overflow exception when needed. */
2520 TCGv t0
= tcg_temp_local_new();
2521 TCGv t1
= tcg_temp_new();
2522 TCGv t2
= tcg_temp_new();
2523 TCGLabel
*l1
= gen_new_label();
2525 gen_load_gpr(t1
, rs
);
2526 tcg_gen_addi_tl(t0
, t1
, uimm
);
2527 tcg_gen_ext32s_tl(t0
, t0
);
2529 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2530 tcg_gen_xori_tl(t2
, t0
, uimm
);
2531 tcg_gen_and_tl(t1
, t1
, t2
);
2533 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2535 /* operands of same sign, result different sign */
2536 generate_exception(ctx
, EXCP_OVERFLOW
);
2538 tcg_gen_ext32s_tl(t0
, t0
);
2539 gen_store_gpr(t0
, rt
);
2545 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2546 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2548 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2551 #if defined(TARGET_MIPS64)
2554 TCGv t0
= tcg_temp_local_new();
2555 TCGv t1
= tcg_temp_new();
2556 TCGv t2
= tcg_temp_new();
2557 TCGLabel
*l1
= gen_new_label();
2559 gen_load_gpr(t1
, rs
);
2560 tcg_gen_addi_tl(t0
, t1
, uimm
);
2562 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2563 tcg_gen_xori_tl(t2
, t0
, uimm
);
2564 tcg_gen_and_tl(t1
, t1
, t2
);
2566 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2568 /* operands of same sign, result different sign */
2569 generate_exception(ctx
, EXCP_OVERFLOW
);
2571 gen_store_gpr(t0
, rt
);
2577 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2579 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2586 /* Logic with immediate operand */
2587 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2588 int rt
, int rs
, int16_t imm
)
2593 /* If no destination, treat it as a NOP. */
2596 uimm
= (uint16_t)imm
;
2599 if (likely(rs
!= 0))
2600 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2602 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2606 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2608 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2611 if (likely(rs
!= 0))
2612 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2614 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2617 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2619 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2620 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2622 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2631 /* Set on less than with immediate operand */
2632 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2633 int rt
, int rs
, int16_t imm
)
2635 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2639 /* If no destination, treat it as a NOP. */
2642 t0
= tcg_temp_new();
2643 gen_load_gpr(t0
, rs
);
2646 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2649 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2655 /* Shifts with immediate operand */
2656 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2657 int rt
, int rs
, int16_t imm
)
2659 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2663 /* If no destination, treat it as a NOP. */
2667 t0
= tcg_temp_new();
2668 gen_load_gpr(t0
, rs
);
2671 tcg_gen_shli_tl(t0
, t0
, uimm
);
2672 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2675 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2679 tcg_gen_ext32u_tl(t0
, t0
);
2680 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2682 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2687 TCGv_i32 t1
= tcg_temp_new_i32();
2689 tcg_gen_trunc_tl_i32(t1
, t0
);
2690 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2691 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2692 tcg_temp_free_i32(t1
);
2694 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2697 #if defined(TARGET_MIPS64)
2699 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2702 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2705 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2709 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2711 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2715 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2718 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2721 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2724 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2732 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2733 int rd
, int rs
, int rt
)
2735 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2736 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2737 /* If no destination, treat it as a NOP.
2738 For add & sub, we must generate the overflow exception when needed. */
2745 TCGv t0
= tcg_temp_local_new();
2746 TCGv t1
= tcg_temp_new();
2747 TCGv t2
= tcg_temp_new();
2748 TCGLabel
*l1
= gen_new_label();
2750 gen_load_gpr(t1
, rs
);
2751 gen_load_gpr(t2
, rt
);
2752 tcg_gen_add_tl(t0
, t1
, t2
);
2753 tcg_gen_ext32s_tl(t0
, t0
);
2754 tcg_gen_xor_tl(t1
, t1
, t2
);
2755 tcg_gen_xor_tl(t2
, t0
, t2
);
2756 tcg_gen_andc_tl(t1
, t2
, t1
);
2758 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2760 /* operands of same sign, result different sign */
2761 generate_exception(ctx
, EXCP_OVERFLOW
);
2763 gen_store_gpr(t0
, rd
);
2768 if (rs
!= 0 && rt
!= 0) {
2769 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2770 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2771 } else if (rs
== 0 && rt
!= 0) {
2772 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2773 } else if (rs
!= 0 && rt
== 0) {
2774 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2776 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2781 TCGv t0
= tcg_temp_local_new();
2782 TCGv t1
= tcg_temp_new();
2783 TCGv t2
= tcg_temp_new();
2784 TCGLabel
*l1
= gen_new_label();
2786 gen_load_gpr(t1
, rs
);
2787 gen_load_gpr(t2
, rt
);
2788 tcg_gen_sub_tl(t0
, t1
, t2
);
2789 tcg_gen_ext32s_tl(t0
, t0
);
2790 tcg_gen_xor_tl(t2
, t1
, t2
);
2791 tcg_gen_xor_tl(t1
, t0
, t1
);
2792 tcg_gen_and_tl(t1
, t1
, t2
);
2794 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2796 /* operands of different sign, first operand and result different sign */
2797 generate_exception(ctx
, EXCP_OVERFLOW
);
2799 gen_store_gpr(t0
, rd
);
2804 if (rs
!= 0 && rt
!= 0) {
2805 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2806 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2807 } else if (rs
== 0 && rt
!= 0) {
2808 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2809 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2810 } else if (rs
!= 0 && rt
== 0) {
2811 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2813 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2816 #if defined(TARGET_MIPS64)
2819 TCGv t0
= tcg_temp_local_new();
2820 TCGv t1
= tcg_temp_new();
2821 TCGv t2
= tcg_temp_new();
2822 TCGLabel
*l1
= gen_new_label();
2824 gen_load_gpr(t1
, rs
);
2825 gen_load_gpr(t2
, rt
);
2826 tcg_gen_add_tl(t0
, t1
, t2
);
2827 tcg_gen_xor_tl(t1
, t1
, t2
);
2828 tcg_gen_xor_tl(t2
, t0
, t2
);
2829 tcg_gen_andc_tl(t1
, t2
, t1
);
2831 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2833 /* operands of same sign, result different sign */
2834 generate_exception(ctx
, EXCP_OVERFLOW
);
2836 gen_store_gpr(t0
, rd
);
2841 if (rs
!= 0 && rt
!= 0) {
2842 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2843 } else if (rs
== 0 && rt
!= 0) {
2844 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2845 } else if (rs
!= 0 && rt
== 0) {
2846 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2848 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2853 TCGv t0
= tcg_temp_local_new();
2854 TCGv t1
= tcg_temp_new();
2855 TCGv t2
= tcg_temp_new();
2856 TCGLabel
*l1
= gen_new_label();
2858 gen_load_gpr(t1
, rs
);
2859 gen_load_gpr(t2
, rt
);
2860 tcg_gen_sub_tl(t0
, t1
, t2
);
2861 tcg_gen_xor_tl(t2
, t1
, t2
);
2862 tcg_gen_xor_tl(t1
, t0
, t1
);
2863 tcg_gen_and_tl(t1
, t1
, t2
);
2865 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2867 /* operands of different sign, first operand and result different sign */
2868 generate_exception(ctx
, EXCP_OVERFLOW
);
2870 gen_store_gpr(t0
, rd
);
2875 if (rs
!= 0 && rt
!= 0) {
2876 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2877 } else if (rs
== 0 && rt
!= 0) {
2878 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2879 } else if (rs
!= 0 && rt
== 0) {
2880 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2882 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2887 if (likely(rs
!= 0 && rt
!= 0)) {
2888 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2889 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2891 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2897 /* Conditional move */
2898 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2899 int rd
, int rs
, int rt
)
2904 /* If no destination, treat it as a NOP. */
2908 t0
= tcg_temp_new();
2909 gen_load_gpr(t0
, rt
);
2910 t1
= tcg_const_tl(0);
2911 t2
= tcg_temp_new();
2912 gen_load_gpr(t2
, rs
);
2915 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2918 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2921 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2924 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2933 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2934 int rd
, int rs
, int rt
)
2937 /* If no destination, treat it as a NOP. */
2943 if (likely(rs
!= 0 && rt
!= 0)) {
2944 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2946 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2950 if (rs
!= 0 && rt
!= 0) {
2951 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2952 } else if (rs
== 0 && rt
!= 0) {
2953 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2954 } else if (rs
!= 0 && rt
== 0) {
2955 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2957 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2961 if (likely(rs
!= 0 && rt
!= 0)) {
2962 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2963 } else if (rs
== 0 && rt
!= 0) {
2964 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2965 } else if (rs
!= 0 && rt
== 0) {
2966 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2968 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2972 if (likely(rs
!= 0 && rt
!= 0)) {
2973 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2974 } else if (rs
== 0 && rt
!= 0) {
2975 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2976 } else if (rs
!= 0 && rt
== 0) {
2977 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2979 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2985 /* Set on lower than */
2986 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2987 int rd
, int rs
, int rt
)
2992 /* If no destination, treat it as a NOP. */
2996 t0
= tcg_temp_new();
2997 t1
= tcg_temp_new();
2998 gen_load_gpr(t0
, rs
);
2999 gen_load_gpr(t1
, rt
);
3002 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3005 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3013 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3014 int rd
, int rs
, int rt
)
3019 /* If no destination, treat it as a NOP.
3020 For add & sub, we must generate the overflow exception when needed. */
3024 t0
= tcg_temp_new();
3025 t1
= tcg_temp_new();
3026 gen_load_gpr(t0
, rs
);
3027 gen_load_gpr(t1
, rt
);
3030 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3031 tcg_gen_shl_tl(t0
, t1
, t0
);
3032 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3035 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3036 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3039 tcg_gen_ext32u_tl(t1
, t1
);
3040 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3041 tcg_gen_shr_tl(t0
, t1
, t0
);
3042 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3046 TCGv_i32 t2
= tcg_temp_new_i32();
3047 TCGv_i32 t3
= tcg_temp_new_i32();
3049 tcg_gen_trunc_tl_i32(t2
, t0
);
3050 tcg_gen_trunc_tl_i32(t3
, t1
);
3051 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3052 tcg_gen_rotr_i32(t2
, t3
, t2
);
3053 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3054 tcg_temp_free_i32(t2
);
3055 tcg_temp_free_i32(t3
);
3058 #if defined(TARGET_MIPS64)
3060 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3061 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3064 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3065 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3068 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3069 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3072 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3073 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3081 /* Arithmetic on HI/LO registers */
3082 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3084 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3095 #if defined(TARGET_MIPS64)
3097 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3101 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3105 #if defined(TARGET_MIPS64)
3107 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3111 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3116 #if defined(TARGET_MIPS64)
3118 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3122 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3125 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3130 #if defined(TARGET_MIPS64)
3132 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3136 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3139 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3145 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3148 TCGv t0
= tcg_const_tl(addr
);
3149 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3150 gen_store_gpr(t0
, reg
);
3154 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3160 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3163 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3164 addr
= addr_add(ctx
, pc
, offset
);
3165 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3169 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3170 addr
= addr_add(ctx
, pc
, offset
);
3171 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3173 #if defined(TARGET_MIPS64)
3176 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3177 addr
= addr_add(ctx
, pc
, offset
);
3178 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3182 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3185 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3186 addr
= addr_add(ctx
, pc
, offset
);
3187 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3192 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3193 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3194 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3197 #if defined(TARGET_MIPS64)
3198 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3199 case R6_OPC_LDPC
+ (1 << 16):
3200 case R6_OPC_LDPC
+ (2 << 16):
3201 case R6_OPC_LDPC
+ (3 << 16):
3203 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3204 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3205 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3209 MIPS_INVAL("OPC_PCREL");
3210 generate_exception_end(ctx
, EXCP_RI
);
3217 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3226 t0
= tcg_temp_new();
3227 t1
= tcg_temp_new();
3229 gen_load_gpr(t0
, rs
);
3230 gen_load_gpr(t1
, rt
);
3235 TCGv t2
= tcg_temp_new();
3236 TCGv t3
= tcg_temp_new();
3237 tcg_gen_ext32s_tl(t0
, t0
);
3238 tcg_gen_ext32s_tl(t1
, t1
);
3239 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3240 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3241 tcg_gen_and_tl(t2
, t2
, t3
);
3242 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3243 tcg_gen_or_tl(t2
, t2
, t3
);
3244 tcg_gen_movi_tl(t3
, 0);
3245 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3246 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3247 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3254 TCGv t2
= tcg_temp_new();
3255 TCGv t3
= tcg_temp_new();
3256 tcg_gen_ext32s_tl(t0
, t0
);
3257 tcg_gen_ext32s_tl(t1
, t1
);
3258 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3259 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3260 tcg_gen_and_tl(t2
, t2
, t3
);
3261 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3262 tcg_gen_or_tl(t2
, t2
, t3
);
3263 tcg_gen_movi_tl(t3
, 0);
3264 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3265 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3266 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3273 TCGv t2
= tcg_const_tl(0);
3274 TCGv t3
= tcg_const_tl(1);
3275 tcg_gen_ext32u_tl(t0
, t0
);
3276 tcg_gen_ext32u_tl(t1
, t1
);
3277 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3278 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3279 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3286 TCGv t2
= tcg_const_tl(0);
3287 TCGv t3
= tcg_const_tl(1);
3288 tcg_gen_ext32u_tl(t0
, t0
);
3289 tcg_gen_ext32u_tl(t1
, t1
);
3290 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3291 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3292 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3299 TCGv_i32 t2
= tcg_temp_new_i32();
3300 TCGv_i32 t3
= tcg_temp_new_i32();
3301 tcg_gen_trunc_tl_i32(t2
, t0
);
3302 tcg_gen_trunc_tl_i32(t3
, t1
);
3303 tcg_gen_mul_i32(t2
, t2
, t3
);
3304 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3305 tcg_temp_free_i32(t2
);
3306 tcg_temp_free_i32(t3
);
3311 TCGv_i32 t2
= tcg_temp_new_i32();
3312 TCGv_i32 t3
= tcg_temp_new_i32();
3313 tcg_gen_trunc_tl_i32(t2
, t0
);
3314 tcg_gen_trunc_tl_i32(t3
, t1
);
3315 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3316 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3317 tcg_temp_free_i32(t2
);
3318 tcg_temp_free_i32(t3
);
3323 TCGv_i32 t2
= tcg_temp_new_i32();
3324 TCGv_i32 t3
= tcg_temp_new_i32();
3325 tcg_gen_trunc_tl_i32(t2
, t0
);
3326 tcg_gen_trunc_tl_i32(t3
, t1
);
3327 tcg_gen_mul_i32(t2
, t2
, t3
);
3328 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3329 tcg_temp_free_i32(t2
);
3330 tcg_temp_free_i32(t3
);
3335 TCGv_i32 t2
= tcg_temp_new_i32();
3336 TCGv_i32 t3
= tcg_temp_new_i32();
3337 tcg_gen_trunc_tl_i32(t2
, t0
);
3338 tcg_gen_trunc_tl_i32(t3
, t1
);
3339 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3340 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3341 tcg_temp_free_i32(t2
);
3342 tcg_temp_free_i32(t3
);
3345 #if defined(TARGET_MIPS64)
3348 TCGv t2
= tcg_temp_new();
3349 TCGv t3
= tcg_temp_new();
3350 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3351 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3352 tcg_gen_and_tl(t2
, t2
, t3
);
3353 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3354 tcg_gen_or_tl(t2
, t2
, t3
);
3355 tcg_gen_movi_tl(t3
, 0);
3356 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3357 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3364 TCGv t2
= tcg_temp_new();
3365 TCGv t3
= tcg_temp_new();
3366 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3367 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3368 tcg_gen_and_tl(t2
, t2
, t3
);
3369 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3370 tcg_gen_or_tl(t2
, t2
, t3
);
3371 tcg_gen_movi_tl(t3
, 0);
3372 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3373 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3380 TCGv t2
= tcg_const_tl(0);
3381 TCGv t3
= tcg_const_tl(1);
3382 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3383 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3390 TCGv t2
= tcg_const_tl(0);
3391 TCGv t3
= tcg_const_tl(1);
3392 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3393 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3399 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3403 TCGv t2
= tcg_temp_new();
3404 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3409 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3413 TCGv t2
= tcg_temp_new();
3414 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3420 MIPS_INVAL("r6 mul/div");
3421 generate_exception_end(ctx
, EXCP_RI
);
3429 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3430 int acc
, int rs
, int rt
)
3434 t0
= tcg_temp_new();
3435 t1
= tcg_temp_new();
3437 gen_load_gpr(t0
, rs
);
3438 gen_load_gpr(t1
, rt
);
3447 TCGv t2
= tcg_temp_new();
3448 TCGv t3
= tcg_temp_new();
3449 tcg_gen_ext32s_tl(t0
, t0
);
3450 tcg_gen_ext32s_tl(t1
, t1
);
3451 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3452 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3453 tcg_gen_and_tl(t2
, t2
, t3
);
3454 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3455 tcg_gen_or_tl(t2
, t2
, t3
);
3456 tcg_gen_movi_tl(t3
, 0);
3457 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3458 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3459 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3460 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3461 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3468 TCGv t2
= tcg_const_tl(0);
3469 TCGv t3
= tcg_const_tl(1);
3470 tcg_gen_ext32u_tl(t0
, t0
);
3471 tcg_gen_ext32u_tl(t1
, t1
);
3472 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3473 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3474 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3475 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3476 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3483 TCGv_i32 t2
= tcg_temp_new_i32();
3484 TCGv_i32 t3
= tcg_temp_new_i32();
3485 tcg_gen_trunc_tl_i32(t2
, t0
);
3486 tcg_gen_trunc_tl_i32(t3
, t1
);
3487 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3488 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3489 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3490 tcg_temp_free_i32(t2
);
3491 tcg_temp_free_i32(t3
);
3496 TCGv_i32 t2
= tcg_temp_new_i32();
3497 TCGv_i32 t3
= tcg_temp_new_i32();
3498 tcg_gen_trunc_tl_i32(t2
, t0
);
3499 tcg_gen_trunc_tl_i32(t3
, t1
);
3500 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3501 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3502 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3503 tcg_temp_free_i32(t2
);
3504 tcg_temp_free_i32(t3
);
3507 #if defined(TARGET_MIPS64)
3510 TCGv t2
= tcg_temp_new();
3511 TCGv t3
= tcg_temp_new();
3512 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3513 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3514 tcg_gen_and_tl(t2
, t2
, t3
);
3515 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3516 tcg_gen_or_tl(t2
, t2
, t3
);
3517 tcg_gen_movi_tl(t3
, 0);
3518 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3519 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3520 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3527 TCGv t2
= tcg_const_tl(0);
3528 TCGv t3
= tcg_const_tl(1);
3529 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3530 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3531 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3537 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3540 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3545 TCGv_i64 t2
= tcg_temp_new_i64();
3546 TCGv_i64 t3
= tcg_temp_new_i64();
3548 tcg_gen_ext_tl_i64(t2
, t0
);
3549 tcg_gen_ext_tl_i64(t3
, t1
);
3550 tcg_gen_mul_i64(t2
, t2
, t3
);
3551 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3552 tcg_gen_add_i64(t2
, t2
, t3
);
3553 tcg_temp_free_i64(t3
);
3554 gen_move_low32(cpu_LO
[acc
], t2
);
3555 gen_move_high32(cpu_HI
[acc
], t2
);
3556 tcg_temp_free_i64(t2
);
3561 TCGv_i64 t2
= tcg_temp_new_i64();
3562 TCGv_i64 t3
= tcg_temp_new_i64();
3564 tcg_gen_ext32u_tl(t0
, t0
);
3565 tcg_gen_ext32u_tl(t1
, t1
);
3566 tcg_gen_extu_tl_i64(t2
, t0
);
3567 tcg_gen_extu_tl_i64(t3
, t1
);
3568 tcg_gen_mul_i64(t2
, t2
, t3
);
3569 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3570 tcg_gen_add_i64(t2
, t2
, t3
);
3571 tcg_temp_free_i64(t3
);
3572 gen_move_low32(cpu_LO
[acc
], t2
);
3573 gen_move_high32(cpu_HI
[acc
], t2
);
3574 tcg_temp_free_i64(t2
);
3579 TCGv_i64 t2
= tcg_temp_new_i64();
3580 TCGv_i64 t3
= tcg_temp_new_i64();
3582 tcg_gen_ext_tl_i64(t2
, t0
);
3583 tcg_gen_ext_tl_i64(t3
, t1
);
3584 tcg_gen_mul_i64(t2
, t2
, t3
);
3585 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3586 tcg_gen_sub_i64(t2
, t3
, t2
);
3587 tcg_temp_free_i64(t3
);
3588 gen_move_low32(cpu_LO
[acc
], t2
);
3589 gen_move_high32(cpu_HI
[acc
], t2
);
3590 tcg_temp_free_i64(t2
);
3595 TCGv_i64 t2
= tcg_temp_new_i64();
3596 TCGv_i64 t3
= tcg_temp_new_i64();
3598 tcg_gen_ext32u_tl(t0
, t0
);
3599 tcg_gen_ext32u_tl(t1
, t1
);
3600 tcg_gen_extu_tl_i64(t2
, t0
);
3601 tcg_gen_extu_tl_i64(t3
, t1
);
3602 tcg_gen_mul_i64(t2
, t2
, t3
);
3603 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3604 tcg_gen_sub_i64(t2
, t3
, t2
);
3605 tcg_temp_free_i64(t3
);
3606 gen_move_low32(cpu_LO
[acc
], t2
);
3607 gen_move_high32(cpu_HI
[acc
], t2
);
3608 tcg_temp_free_i64(t2
);
3612 MIPS_INVAL("mul/div");
3613 generate_exception_end(ctx
, EXCP_RI
);
3621 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3622 int rd
, int rs
, int rt
)
3624 TCGv t0
= tcg_temp_new();
3625 TCGv t1
= tcg_temp_new();
3627 gen_load_gpr(t0
, rs
);
3628 gen_load_gpr(t1
, rt
);
3631 case OPC_VR54XX_MULS
:
3632 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3634 case OPC_VR54XX_MULSU
:
3635 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3637 case OPC_VR54XX_MACC
:
3638 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3640 case OPC_VR54XX_MACCU
:
3641 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3643 case OPC_VR54XX_MSAC
:
3644 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3646 case OPC_VR54XX_MSACU
:
3647 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3649 case OPC_VR54XX_MULHI
:
3650 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3652 case OPC_VR54XX_MULHIU
:
3653 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3655 case OPC_VR54XX_MULSHI
:
3656 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3658 case OPC_VR54XX_MULSHIU
:
3659 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3661 case OPC_VR54XX_MACCHI
:
3662 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3664 case OPC_VR54XX_MACCHIU
:
3665 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3667 case OPC_VR54XX_MSACHI
:
3668 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3670 case OPC_VR54XX_MSACHIU
:
3671 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3674 MIPS_INVAL("mul vr54xx");
3675 generate_exception_end(ctx
, EXCP_RI
);
3678 gen_store_gpr(t0
, rd
);
3685 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3695 gen_load_gpr(t0
, rs
);
3700 #if defined(TARGET_MIPS64)
3704 tcg_gen_not_tl(t0
, t0
);
3713 tcg_gen_ext32u_tl(t0
, t0
);
3714 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3715 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3717 #if defined(TARGET_MIPS64)
3722 tcg_gen_clzi_i64(t0
, t0
, 64);
3728 /* Godson integer instructions */
3729 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3730 int rd
, int rs
, int rt
)
3742 case OPC_MULTU_G_2E
:
3743 case OPC_MULTU_G_2F
:
3744 #if defined(TARGET_MIPS64)
3745 case OPC_DMULT_G_2E
:
3746 case OPC_DMULT_G_2F
:
3747 case OPC_DMULTU_G_2E
:
3748 case OPC_DMULTU_G_2F
:
3750 t0
= tcg_temp_new();
3751 t1
= tcg_temp_new();
3754 t0
= tcg_temp_local_new();
3755 t1
= tcg_temp_local_new();
3759 gen_load_gpr(t0
, rs
);
3760 gen_load_gpr(t1
, rt
);
3765 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3766 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3768 case OPC_MULTU_G_2E
:
3769 case OPC_MULTU_G_2F
:
3770 tcg_gen_ext32u_tl(t0
, t0
);
3771 tcg_gen_ext32u_tl(t1
, t1
);
3772 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3773 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3778 TCGLabel
*l1
= gen_new_label();
3779 TCGLabel
*l2
= gen_new_label();
3780 TCGLabel
*l3
= gen_new_label();
3781 tcg_gen_ext32s_tl(t0
, t0
);
3782 tcg_gen_ext32s_tl(t1
, t1
);
3783 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3784 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3787 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3789 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3792 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3793 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3800 TCGLabel
*l1
= gen_new_label();
3801 TCGLabel
*l2
= gen_new_label();
3802 tcg_gen_ext32u_tl(t0
, t0
);
3803 tcg_gen_ext32u_tl(t1
, t1
);
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3805 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3808 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3809 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3816 TCGLabel
*l1
= gen_new_label();
3817 TCGLabel
*l2
= gen_new_label();
3818 TCGLabel
*l3
= gen_new_label();
3819 tcg_gen_ext32u_tl(t0
, t0
);
3820 tcg_gen_ext32u_tl(t1
, t1
);
3821 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3822 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3823 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3825 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3828 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3829 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3836 TCGLabel
*l1
= gen_new_label();
3837 TCGLabel
*l2
= gen_new_label();
3838 tcg_gen_ext32u_tl(t0
, t0
);
3839 tcg_gen_ext32u_tl(t1
, t1
);
3840 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3841 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3844 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3845 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3849 #if defined(TARGET_MIPS64)
3850 case OPC_DMULT_G_2E
:
3851 case OPC_DMULT_G_2F
:
3852 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3854 case OPC_DMULTU_G_2E
:
3855 case OPC_DMULTU_G_2F
:
3856 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3861 TCGLabel
*l1
= gen_new_label();
3862 TCGLabel
*l2
= gen_new_label();
3863 TCGLabel
*l3
= gen_new_label();
3864 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3865 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3868 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3869 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3870 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3873 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3877 case OPC_DDIVU_G_2E
:
3878 case OPC_DDIVU_G_2F
:
3880 TCGLabel
*l1
= gen_new_label();
3881 TCGLabel
*l2
= gen_new_label();
3882 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3883 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3886 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3893 TCGLabel
*l1
= gen_new_label();
3894 TCGLabel
*l2
= gen_new_label();
3895 TCGLabel
*l3
= gen_new_label();
3896 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3897 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3898 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3900 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3903 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3907 case OPC_DMODU_G_2E
:
3908 case OPC_DMODU_G_2F
:
3910 TCGLabel
*l1
= gen_new_label();
3911 TCGLabel
*l2
= gen_new_label();
3912 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3913 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3916 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3927 /* Loongson multimedia instructions */
3928 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3930 uint32_t opc
, shift_max
;
3933 opc
= MASK_LMI(ctx
->opcode
);
3939 t0
= tcg_temp_local_new_i64();
3940 t1
= tcg_temp_local_new_i64();
3943 t0
= tcg_temp_new_i64();
3944 t1
= tcg_temp_new_i64();
3948 check_cp1_enabled(ctx
);
3949 gen_load_fpr64(ctx
, t0
, rs
);
3950 gen_load_fpr64(ctx
, t1
, rt
);
3952 #define LMI_HELPER(UP, LO) \
3953 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3954 #define LMI_HELPER_1(UP, LO) \
3955 case OPC_##UP: gen_helper_##LO(t0, t0); break
3956 #define LMI_DIRECT(UP, LO, OP) \
3957 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3960 LMI_HELPER(PADDSH
, paddsh
);
3961 LMI_HELPER(PADDUSH
, paddush
);
3962 LMI_HELPER(PADDH
, paddh
);
3963 LMI_HELPER(PADDW
, paddw
);
3964 LMI_HELPER(PADDSB
, paddsb
);
3965 LMI_HELPER(PADDUSB
, paddusb
);
3966 LMI_HELPER(PADDB
, paddb
);
3968 LMI_HELPER(PSUBSH
, psubsh
);
3969 LMI_HELPER(PSUBUSH
, psubush
);
3970 LMI_HELPER(PSUBH
, psubh
);
3971 LMI_HELPER(PSUBW
, psubw
);
3972 LMI_HELPER(PSUBSB
, psubsb
);
3973 LMI_HELPER(PSUBUSB
, psubusb
);
3974 LMI_HELPER(PSUBB
, psubb
);
3976 LMI_HELPER(PSHUFH
, pshufh
);
3977 LMI_HELPER(PACKSSWH
, packsswh
);
3978 LMI_HELPER(PACKSSHB
, packsshb
);
3979 LMI_HELPER(PACKUSHB
, packushb
);
3981 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3982 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3983 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3984 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3985 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3986 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3988 LMI_HELPER(PAVGH
, pavgh
);
3989 LMI_HELPER(PAVGB
, pavgb
);
3990 LMI_HELPER(PMAXSH
, pmaxsh
);
3991 LMI_HELPER(PMINSH
, pminsh
);
3992 LMI_HELPER(PMAXUB
, pmaxub
);
3993 LMI_HELPER(PMINUB
, pminub
);
3995 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3996 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3997 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3998 LMI_HELPER(PCMPGTH
, pcmpgth
);
3999 LMI_HELPER(PCMPEQB
, pcmpeqb
);
4000 LMI_HELPER(PCMPGTB
, pcmpgtb
);
4002 LMI_HELPER(PSLLW
, psllw
);
4003 LMI_HELPER(PSLLH
, psllh
);
4004 LMI_HELPER(PSRLW
, psrlw
);
4005 LMI_HELPER(PSRLH
, psrlh
);
4006 LMI_HELPER(PSRAW
, psraw
);
4007 LMI_HELPER(PSRAH
, psrah
);
4009 LMI_HELPER(PMULLH
, pmullh
);
4010 LMI_HELPER(PMULHH
, pmulhh
);
4011 LMI_HELPER(PMULHUH
, pmulhuh
);
4012 LMI_HELPER(PMADDHW
, pmaddhw
);
4014 LMI_HELPER(PASUBUB
, pasubub
);
4015 LMI_HELPER_1(BIADD
, biadd
);
4016 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
4018 LMI_DIRECT(PADDD
, paddd
, add
);
4019 LMI_DIRECT(PSUBD
, psubd
, sub
);
4020 LMI_DIRECT(XOR_CP2
, xor, xor);
4021 LMI_DIRECT(NOR_CP2
, nor
, nor
);
4022 LMI_DIRECT(AND_CP2
, and, and);
4023 LMI_DIRECT(OR_CP2
, or, or);
4026 tcg_gen_andc_i64(t0
, t1
, t0
);
4030 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4033 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4036 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4039 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4043 tcg_gen_andi_i64(t1
, t1
, 3);
4044 tcg_gen_shli_i64(t1
, t1
, 4);
4045 tcg_gen_shr_i64(t0
, t0
, t1
);
4046 tcg_gen_ext16u_i64(t0
, t0
);
4050 tcg_gen_add_i64(t0
, t0
, t1
);
4051 tcg_gen_ext32s_i64(t0
, t0
);
4054 tcg_gen_sub_i64(t0
, t0
, t1
);
4055 tcg_gen_ext32s_i64(t0
, t0
);
4077 /* Make sure shift count isn't TCG undefined behaviour. */
4078 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4083 tcg_gen_shl_i64(t0
, t0
, t1
);
4087 /* Since SRA is UndefinedResult without sign-extended inputs,
4088 we can treat SRA and DSRA the same. */
4089 tcg_gen_sar_i64(t0
, t0
, t1
);
4092 /* We want to shift in zeros for SRL; zero-extend first. */
4093 tcg_gen_ext32u_i64(t0
, t0
);
4096 tcg_gen_shr_i64(t0
, t0
, t1
);
4100 if (shift_max
== 32) {
4101 tcg_gen_ext32s_i64(t0
, t0
);
4104 /* Shifts larger than MAX produce zero. */
4105 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4106 tcg_gen_neg_i64(t1
, t1
);
4107 tcg_gen_and_i64(t0
, t0
, t1
);
4113 TCGv_i64 t2
= tcg_temp_new_i64();
4114 TCGLabel
*lab
= gen_new_label();
4116 tcg_gen_mov_i64(t2
, t0
);
4117 tcg_gen_add_i64(t0
, t1
, t2
);
4118 if (opc
== OPC_ADD_CP2
) {
4119 tcg_gen_ext32s_i64(t0
, t0
);
4121 tcg_gen_xor_i64(t1
, t1
, t2
);
4122 tcg_gen_xor_i64(t2
, t2
, t0
);
4123 tcg_gen_andc_i64(t1
, t2
, t1
);
4124 tcg_temp_free_i64(t2
);
4125 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4126 generate_exception(ctx
, EXCP_OVERFLOW
);
4134 TCGv_i64 t2
= tcg_temp_new_i64();
4135 TCGLabel
*lab
= gen_new_label();
4137 tcg_gen_mov_i64(t2
, t0
);
4138 tcg_gen_sub_i64(t0
, t1
, t2
);
4139 if (opc
== OPC_SUB_CP2
) {
4140 tcg_gen_ext32s_i64(t0
, t0
);
4142 tcg_gen_xor_i64(t1
, t1
, t2
);
4143 tcg_gen_xor_i64(t2
, t2
, t0
);
4144 tcg_gen_and_i64(t1
, t1
, t2
);
4145 tcg_temp_free_i64(t2
);
4146 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4147 generate_exception(ctx
, EXCP_OVERFLOW
);
4153 tcg_gen_ext32u_i64(t0
, t0
);
4154 tcg_gen_ext32u_i64(t1
, t1
);
4155 tcg_gen_mul_i64(t0
, t0
, t1
);
4164 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4165 FD field is the CC field? */
4167 MIPS_INVAL("loongson_cp2");
4168 generate_exception_end(ctx
, EXCP_RI
);
4175 gen_store_fpr64(ctx
, t0
, rd
);
4177 tcg_temp_free_i64(t0
);
4178 tcg_temp_free_i64(t1
);
4182 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4183 int rs
, int rt
, int16_t imm
)
4186 TCGv t0
= tcg_temp_new();
4187 TCGv t1
= tcg_temp_new();
4190 /* Load needed operands */
4198 /* Compare two registers */
4200 gen_load_gpr(t0
, rs
);
4201 gen_load_gpr(t1
, rt
);
4211 /* Compare register to immediate */
4212 if (rs
!= 0 || imm
!= 0) {
4213 gen_load_gpr(t0
, rs
);
4214 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4221 case OPC_TEQ
: /* rs == rs */
4222 case OPC_TEQI
: /* r0 == 0 */
4223 case OPC_TGE
: /* rs >= rs */
4224 case OPC_TGEI
: /* r0 >= 0 */
4225 case OPC_TGEU
: /* rs >= rs unsigned */
4226 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4228 generate_exception_end(ctx
, EXCP_TRAP
);
4230 case OPC_TLT
: /* rs < rs */
4231 case OPC_TLTI
: /* r0 < 0 */
4232 case OPC_TLTU
: /* rs < rs unsigned */
4233 case OPC_TLTIU
: /* r0 < 0 unsigned */
4234 case OPC_TNE
: /* rs != rs */
4235 case OPC_TNEI
: /* r0 != 0 */
4236 /* Never trap: treat as NOP. */
4240 TCGLabel
*l1
= gen_new_label();
4245 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4249 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4253 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4257 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4261 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4265 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4268 generate_exception(ctx
, EXCP_TRAP
);
4275 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
4277 if (unlikely(ctx
->base
.singlestep_enabled
)) {
4281 #ifndef CONFIG_USER_ONLY
4282 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
4288 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4290 if (use_goto_tb(ctx
, dest
)) {
4293 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4296 if (ctx
->base
.singlestep_enabled
) {
4297 save_cpu_state(ctx
, 0);
4298 gen_helper_raise_exception_debug(cpu_env
);
4300 tcg_gen_lookup_and_goto_ptr();
4304 /* Branches (before delay slot) */
4305 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4307 int rs
, int rt
, int32_t offset
,
4310 target_ulong btgt
= -1;
4312 int bcond_compute
= 0;
4313 TCGv t0
= tcg_temp_new();
4314 TCGv t1
= tcg_temp_new();
4316 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4317 #ifdef MIPS_DEBUG_DISAS
4318 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4319 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4321 generate_exception_end(ctx
, EXCP_RI
);
4325 /* Load needed operands */
4331 /* Compare two registers */
4333 gen_load_gpr(t0
, rs
);
4334 gen_load_gpr(t1
, rt
);
4337 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4351 /* Compare to zero */
4353 gen_load_gpr(t0
, rs
);
4356 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4359 #if defined(TARGET_MIPS64)
4361 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4363 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4366 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4371 /* Jump to immediate */
4372 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4377 /* Jump to register */
4378 if (offset
!= 0 && offset
!= 16) {
4379 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4380 others are reserved. */
4381 MIPS_INVAL("jump hint");
4382 generate_exception_end(ctx
, EXCP_RI
);
4385 gen_load_gpr(btarget
, rs
);
4388 MIPS_INVAL("branch/jump");
4389 generate_exception_end(ctx
, EXCP_RI
);
4392 if (bcond_compute
== 0) {
4393 /* No condition to be computed */
4395 case OPC_BEQ
: /* rx == rx */
4396 case OPC_BEQL
: /* rx == rx likely */
4397 case OPC_BGEZ
: /* 0 >= 0 */
4398 case OPC_BGEZL
: /* 0 >= 0 likely */
4399 case OPC_BLEZ
: /* 0 <= 0 */
4400 case OPC_BLEZL
: /* 0 <= 0 likely */
4402 ctx
->hflags
|= MIPS_HFLAG_B
;
4404 case OPC_BGEZAL
: /* 0 >= 0 */
4405 case OPC_BGEZALL
: /* 0 >= 0 likely */
4406 /* Always take and link */
4408 ctx
->hflags
|= MIPS_HFLAG_B
;
4410 case OPC_BNE
: /* rx != rx */
4411 case OPC_BGTZ
: /* 0 > 0 */
4412 case OPC_BLTZ
: /* 0 < 0 */
4415 case OPC_BLTZAL
: /* 0 < 0 */
4416 /* Handle as an unconditional branch to get correct delay
4419 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4420 ctx
->hflags
|= MIPS_HFLAG_B
;
4422 case OPC_BLTZALL
: /* 0 < 0 likely */
4423 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4424 /* Skip the instruction in the delay slot */
4425 ctx
->base
.pc_next
+= 4;
4427 case OPC_BNEL
: /* rx != rx likely */
4428 case OPC_BGTZL
: /* 0 > 0 likely */
4429 case OPC_BLTZL
: /* 0 < 0 likely */
4430 /* Skip the instruction in the delay slot */
4431 ctx
->base
.pc_next
+= 4;
4434 ctx
->hflags
|= MIPS_HFLAG_B
;
4437 ctx
->hflags
|= MIPS_HFLAG_BX
;
4441 ctx
->hflags
|= MIPS_HFLAG_B
;
4444 ctx
->hflags
|= MIPS_HFLAG_BR
;
4448 ctx
->hflags
|= MIPS_HFLAG_BR
;
4451 MIPS_INVAL("branch/jump");
4452 generate_exception_end(ctx
, EXCP_RI
);
4458 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4461 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4464 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4467 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4470 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4473 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4476 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4480 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4484 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4487 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4490 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4493 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4496 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4499 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4502 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4504 #if defined(TARGET_MIPS64)
4506 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4510 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4513 ctx
->hflags
|= MIPS_HFLAG_BC
;
4516 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4519 ctx
->hflags
|= MIPS_HFLAG_BL
;
4522 MIPS_INVAL("conditional branch/jump");
4523 generate_exception_end(ctx
, EXCP_RI
);
4528 ctx
->btarget
= btgt
;
4530 switch (delayslot_size
) {
4532 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4535 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4540 int post_delay
= insn_bytes
+ delayslot_size
;
4541 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4543 tcg_gen_movi_tl(cpu_gpr
[blink
],
4544 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
4548 if (insn_bytes
== 2)
4549 ctx
->hflags
|= MIPS_HFLAG_B16
;
4554 /* special3 bitfield operations */
4555 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4556 int rs
, int lsb
, int msb
)
4558 TCGv t0
= tcg_temp_new();
4559 TCGv t1
= tcg_temp_new();
4561 gen_load_gpr(t1
, rs
);
4564 if (lsb
+ msb
> 31) {
4568 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4570 /* The two checks together imply that lsb == 0,
4571 so this is a simple sign-extension. */
4572 tcg_gen_ext32s_tl(t0
, t1
);
4575 #if defined(TARGET_MIPS64)
4584 if (lsb
+ msb
> 63) {
4587 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4594 gen_load_gpr(t0
, rt
);
4595 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4596 tcg_gen_ext32s_tl(t0
, t0
);
4598 #if defined(TARGET_MIPS64)
4609 gen_load_gpr(t0
, rt
);
4610 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4615 MIPS_INVAL("bitops");
4616 generate_exception_end(ctx
, EXCP_RI
);
4621 gen_store_gpr(t0
, rt
);
4626 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4631 /* If no destination, treat it as a NOP. */
4635 t0
= tcg_temp_new();
4636 gen_load_gpr(t0
, rt
);
4640 TCGv t1
= tcg_temp_new();
4641 TCGv t2
= tcg_const_tl(0x00FF00FF);
4643 tcg_gen_shri_tl(t1
, t0
, 8);
4644 tcg_gen_and_tl(t1
, t1
, t2
);
4645 tcg_gen_and_tl(t0
, t0
, t2
);
4646 tcg_gen_shli_tl(t0
, t0
, 8);
4647 tcg_gen_or_tl(t0
, t0
, t1
);
4650 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4654 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4657 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4659 #if defined(TARGET_MIPS64)
4662 TCGv t1
= tcg_temp_new();
4663 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
4665 tcg_gen_shri_tl(t1
, t0
, 8);
4666 tcg_gen_and_tl(t1
, t1
, t2
);
4667 tcg_gen_and_tl(t0
, t0
, t2
);
4668 tcg_gen_shli_tl(t0
, t0
, 8);
4669 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4676 TCGv t1
= tcg_temp_new();
4677 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
4679 tcg_gen_shri_tl(t1
, t0
, 16);
4680 tcg_gen_and_tl(t1
, t1
, t2
);
4681 tcg_gen_and_tl(t0
, t0
, t2
);
4682 tcg_gen_shli_tl(t0
, t0
, 16);
4683 tcg_gen_or_tl(t0
, t0
, t1
);
4684 tcg_gen_shri_tl(t1
, t0
, 32);
4685 tcg_gen_shli_tl(t0
, t0
, 32);
4686 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4693 MIPS_INVAL("bsfhl");
4694 generate_exception_end(ctx
, EXCP_RI
);
4701 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4710 t0
= tcg_temp_new();
4711 t1
= tcg_temp_new();
4712 gen_load_gpr(t0
, rs
);
4713 gen_load_gpr(t1
, rt
);
4714 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4715 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4716 if (opc
== OPC_LSA
) {
4717 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4726 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4734 t0
= tcg_temp_new();
4735 gen_load_gpr(t0
, rt
);
4739 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4741 #if defined(TARGET_MIPS64)
4743 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4748 TCGv t1
= tcg_temp_new();
4749 gen_load_gpr(t1
, rs
);
4753 TCGv_i64 t2
= tcg_temp_new_i64();
4754 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4755 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4756 gen_move_low32(cpu_gpr
[rd
], t2
);
4757 tcg_temp_free_i64(t2
);
4760 #if defined(TARGET_MIPS64)
4762 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4763 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4764 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4774 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4781 t0
= tcg_temp_new();
4782 gen_load_gpr(t0
, rt
);
4785 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4787 #if defined(TARGET_MIPS64)
4789 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4796 #ifndef CONFIG_USER_ONLY
4797 /* CP0 (MMU and control) */
4798 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4800 TCGv_i64 t0
= tcg_temp_new_i64();
4801 TCGv_i64 t1
= tcg_temp_new_i64();
4803 tcg_gen_ext_tl_i64(t0
, arg
);
4804 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4805 #if defined(TARGET_MIPS64)
4806 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4808 tcg_gen_concat32_i64(t1
, t1
, t0
);
4810 tcg_gen_st_i64(t1
, cpu_env
, off
);
4811 tcg_temp_free_i64(t1
);
4812 tcg_temp_free_i64(t0
);
4815 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4817 TCGv_i64 t0
= tcg_temp_new_i64();
4818 TCGv_i64 t1
= tcg_temp_new_i64();
4820 tcg_gen_ext_tl_i64(t0
, arg
);
4821 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4822 tcg_gen_concat32_i64(t1
, t1
, t0
);
4823 tcg_gen_st_i64(t1
, cpu_env
, off
);
4824 tcg_temp_free_i64(t1
);
4825 tcg_temp_free_i64(t0
);
4828 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4830 TCGv_i64 t0
= tcg_temp_new_i64();
4832 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4833 #if defined(TARGET_MIPS64)
4834 tcg_gen_shri_i64(t0
, t0
, 30);
4836 tcg_gen_shri_i64(t0
, t0
, 32);
4838 gen_move_low32(arg
, t0
);
4839 tcg_temp_free_i64(t0
);
4842 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4844 TCGv_i64 t0
= tcg_temp_new_i64();
4846 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4847 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4848 gen_move_low32(arg
, t0
);
4849 tcg_temp_free_i64(t0
);
4852 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4854 TCGv_i32 t0
= tcg_temp_new_i32();
4856 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4857 tcg_gen_ext_i32_tl(arg
, t0
);
4858 tcg_temp_free_i32(t0
);
4861 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4863 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4864 tcg_gen_ext32s_tl(arg
, arg
);
4867 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4869 TCGv_i32 t0
= tcg_temp_new_i32();
4871 tcg_gen_trunc_tl_i32(t0
, arg
);
4872 tcg_gen_st_i32(t0
, cpu_env
, off
);
4873 tcg_temp_free_i32(t0
);
/*
 * Guard a CP0 access on a translation-time condition; on failure jump
 * to the enclosing function's "cp0_unimplemented" label.  Only usable
 * inside functions that define that label.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4883 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4885 const char *rn
= "invalid";
4887 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4893 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4897 goto cp0_unimplemented
;
4903 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4907 goto cp0_unimplemented
;
4913 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4914 ctx
->CP0_LLAddr_shift
);
4918 CP0_CHECK(ctx
->mrp
);
4919 gen_helper_mfhc0_maar(arg
, cpu_env
);
4923 goto cp0_unimplemented
;
4932 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4936 goto cp0_unimplemented
;
4940 goto cp0_unimplemented
;
4942 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4946 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4947 tcg_gen_movi_tl(arg
, 0);
4950 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4952 const char *rn
= "invalid";
4953 uint64_t mask
= ctx
->PAMask
>> 36;
4955 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4961 tcg_gen_andi_tl(arg
, arg
, mask
);
4962 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4966 goto cp0_unimplemented
;
4972 tcg_gen_andi_tl(arg
, arg
, mask
);
4973 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4977 goto cp0_unimplemented
;
4983 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4984 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4985 relevant for modern MIPS cores supporting MTHC0, therefore
4986 treating MTHC0 to LLAddr as NOP. */
4990 CP0_CHECK(ctx
->mrp
);
4991 gen_helper_mthc0_maar(cpu_env
, arg
);
4995 goto cp0_unimplemented
;
5004 tcg_gen_andi_tl(arg
, arg
, mask
);
5005 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5009 goto cp0_unimplemented
;
5013 goto cp0_unimplemented
;
5015 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5018 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5021 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5023 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5024 tcg_gen_movi_tl(arg
, 0);
5026 tcg_gen_movi_tl(arg
, ~0);
5030 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5032 const char *rn
= "invalid";
5035 check_insn(ctx
, ISA_MIPS32
);
5041 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5045 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5046 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5050 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5051 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5055 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5056 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5061 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5065 goto cp0_unimplemented
;
5071 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5072 gen_helper_mfc0_random(arg
, cpu_env
);
5076 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5077 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5081 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5082 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5086 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5087 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5091 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5092 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5096 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5097 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5101 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5102 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5103 rn
= "VPEScheFBack";
5106 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5107 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5111 goto cp0_unimplemented
;
5118 TCGv_i64 tmp
= tcg_temp_new_i64();
5119 tcg_gen_ld_i64(tmp
, cpu_env
,
5120 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5121 #if defined(TARGET_MIPS64)
5123 /* Move RI/XI fields to bits 31:30 */
5124 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5125 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5128 gen_move_low32(arg
, tmp
);
5129 tcg_temp_free_i64(tmp
);
5134 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5135 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5139 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5140 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5144 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5145 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5149 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5150 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5154 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5155 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5159 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5160 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5164 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5165 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5169 goto cp0_unimplemented
;
5176 TCGv_i64 tmp
= tcg_temp_new_i64();
5177 tcg_gen_ld_i64(tmp
, cpu_env
,
5178 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5179 #if defined(TARGET_MIPS64)
5181 /* Move RI/XI fields to bits 31:30 */
5182 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5183 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5186 gen_move_low32(arg
, tmp
);
5187 tcg_temp_free_i64(tmp
);
5193 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5194 rn
= "GlobalNumber";
5197 goto cp0_unimplemented
;
5203 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5204 tcg_gen_ext32s_tl(arg
, arg
);
5208 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5209 rn
= "ContextConfig";
5210 goto cp0_unimplemented
;
5212 CP0_CHECK(ctx
->ulri
);
5213 tcg_gen_ld_tl(arg
, cpu_env
,
5214 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5215 tcg_gen_ext32s_tl(arg
, arg
);
5219 goto cp0_unimplemented
;
5225 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5229 check_insn(ctx
, ISA_MIPS32R2
);
5230 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5235 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5236 tcg_gen_ext32s_tl(arg
, arg
);
5241 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5242 tcg_gen_ext32s_tl(arg
, arg
);
5247 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5248 tcg_gen_ext32s_tl(arg
, arg
);
5252 goto cp0_unimplemented
;
5258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5262 check_insn(ctx
, ISA_MIPS32R2
);
5263 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5267 check_insn(ctx
, ISA_MIPS32R2
);
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5272 check_insn(ctx
, ISA_MIPS32R2
);
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5277 check_insn(ctx
, ISA_MIPS32R2
);
5278 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5282 check_insn(ctx
, ISA_MIPS32R2
);
5283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5287 goto cp0_unimplemented
;
5293 check_insn(ctx
, ISA_MIPS32R2
);
5294 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5298 goto cp0_unimplemented
;
5304 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5305 tcg_gen_ext32s_tl(arg
, arg
);
5310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5315 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5319 goto cp0_unimplemented
;
5325 /* Mark as an IO operation because we read the time. */
5326 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5329 gen_helper_mfc0_count(arg
, cpu_env
);
5330 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5333 /* Break the TB to be able to take timer interrupts immediately
5334 after reading count. DISAS_STOP isn't sufficient, we need to
5335 ensure we break completely out of translated code. */
5336 gen_save_pc(ctx
->base
.pc_next
+ 4);
5337 ctx
->base
.is_jmp
= DISAS_EXIT
;
5340 /* 6,7 are implementation dependent */
5342 goto cp0_unimplemented
;
5348 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5349 tcg_gen_ext32s_tl(arg
, arg
);
5353 goto cp0_unimplemented
;
5359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5362 /* 6,7 are implementation dependent */
5364 goto cp0_unimplemented
;
5370 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5374 check_insn(ctx
, ISA_MIPS32R2
);
5375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5379 check_insn(ctx
, ISA_MIPS32R2
);
5380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5384 check_insn(ctx
, ISA_MIPS32R2
);
5385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5389 goto cp0_unimplemented
;
5395 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5399 goto cp0_unimplemented
;
5405 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5406 tcg_gen_ext32s_tl(arg
, arg
);
5410 goto cp0_unimplemented
;
5416 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5420 check_insn(ctx
, ISA_MIPS32R2
);
5421 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5422 tcg_gen_ext32s_tl(arg
, arg
);
5426 check_insn(ctx
, ISA_MIPS32R2
);
5427 CP0_CHECK(ctx
->cmgcr
);
5428 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5429 tcg_gen_ext32s_tl(arg
, arg
);
5433 goto cp0_unimplemented
;
5439 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5443 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5447 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5451 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5455 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5459 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5462 /* 6,7 are implementation dependent */
5464 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5468 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5472 goto cp0_unimplemented
;
5478 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5482 CP0_CHECK(ctx
->mrp
);
5483 gen_helper_mfc0_maar(arg
, cpu_env
);
5487 CP0_CHECK(ctx
->mrp
);
5488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5492 goto cp0_unimplemented
;
5498 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5502 goto cp0_unimplemented
;
5508 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5512 goto cp0_unimplemented
;
5518 #if defined(TARGET_MIPS64)
5519 check_insn(ctx
, ISA_MIPS3
);
5520 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5521 tcg_gen_ext32s_tl(arg
, arg
);
5526 goto cp0_unimplemented
;
5530 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5531 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5538 goto cp0_unimplemented
;
5542 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5543 rn
= "'Diagnostic"; /* implementation dependent */
5548 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5552 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5553 rn
= "TraceControl";
5554 goto cp0_unimplemented
;
5556 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5557 rn
= "TraceControl2";
5558 goto cp0_unimplemented
;
5560 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5561 rn
= "UserTraceData";
5562 goto cp0_unimplemented
;
5564 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5566 goto cp0_unimplemented
;
5568 goto cp0_unimplemented
;
5575 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5576 tcg_gen_ext32s_tl(arg
, arg
);
5580 goto cp0_unimplemented
;
5586 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5587 rn
= "Performance0";
5590 // gen_helper_mfc0_performance1(arg);
5591 rn
= "Performance1";
5592 goto cp0_unimplemented
;
5594 // gen_helper_mfc0_performance2(arg);
5595 rn
= "Performance2";
5596 goto cp0_unimplemented
;
5598 // gen_helper_mfc0_performance3(arg);
5599 rn
= "Performance3";
5600 goto cp0_unimplemented
;
5602 // gen_helper_mfc0_performance4(arg);
5603 rn
= "Performance4";
5604 goto cp0_unimplemented
;
5606 // gen_helper_mfc0_performance5(arg);
5607 rn
= "Performance5";
5608 goto cp0_unimplemented
;
5610 // gen_helper_mfc0_performance6(arg);
5611 rn
= "Performance6";
5612 goto cp0_unimplemented
;
5614 // gen_helper_mfc0_performance7(arg);
5615 rn
= "Performance7";
5616 goto cp0_unimplemented
;
5618 goto cp0_unimplemented
;
5624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5628 goto cp0_unimplemented
;
5634 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5638 goto cp0_unimplemented
;
5648 TCGv_i64 tmp
= tcg_temp_new_i64();
5649 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5650 gen_move_low32(arg
, tmp
);
5651 tcg_temp_free_i64(tmp
);
5659 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5663 goto cp0_unimplemented
;
5672 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5679 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5683 goto cp0_unimplemented
;
5689 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5690 tcg_gen_ext32s_tl(arg
, arg
);
5694 goto cp0_unimplemented
;
5701 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5705 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5706 tcg_gen_ld_tl(arg
, cpu_env
,
5707 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5708 tcg_gen_ext32s_tl(arg
, arg
);
5712 goto cp0_unimplemented
;
5716 goto cp0_unimplemented
;
5718 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5722 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5723 gen_mfc0_unimplemented(ctx
, arg
);
5726 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5728 const char *rn
= "invalid";
5731 check_insn(ctx
, ISA_MIPS32
);
5733 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5741 gen_helper_mtc0_index(cpu_env
, arg
);
5745 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5746 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5750 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5755 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5765 goto cp0_unimplemented
;
5775 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5776 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5780 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5781 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5785 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5786 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5790 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5791 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5795 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5796 tcg_gen_st_tl(arg
, cpu_env
,
5797 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5801 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5802 tcg_gen_st_tl(arg
, cpu_env
,
5803 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5804 rn
= "VPEScheFBack";
5807 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5808 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5812 goto cp0_unimplemented
;
5818 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5822 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5823 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5827 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5828 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5832 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5833 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5837 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5838 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5842 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5843 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5847 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5848 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5852 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5853 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5857 goto cp0_unimplemented
;
5863 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5869 rn
= "GlobalNumber";
5872 goto cp0_unimplemented
;
5878 gen_helper_mtc0_context(cpu_env
, arg
);
5882 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5883 rn
= "ContextConfig";
5884 goto cp0_unimplemented
;
5886 CP0_CHECK(ctx
->ulri
);
5887 tcg_gen_st_tl(arg
, cpu_env
,
5888 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5892 goto cp0_unimplemented
;
5898 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5902 check_insn(ctx
, ISA_MIPS32R2
);
5903 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5905 ctx
->base
.is_jmp
= DISAS_STOP
;
5909 gen_helper_mtc0_segctl0(cpu_env
, arg
);
5914 gen_helper_mtc0_segctl1(cpu_env
, arg
);
5919 gen_helper_mtc0_segctl2(cpu_env
, arg
);
5923 goto cp0_unimplemented
;
5929 gen_helper_mtc0_wired(cpu_env
, arg
);
5933 check_insn(ctx
, ISA_MIPS32R2
);
5934 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5938 check_insn(ctx
, ISA_MIPS32R2
);
5939 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5943 check_insn(ctx
, ISA_MIPS32R2
);
5944 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5948 check_insn(ctx
, ISA_MIPS32R2
);
5949 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5953 check_insn(ctx
, ISA_MIPS32R2
);
5954 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5958 goto cp0_unimplemented
;
5964 check_insn(ctx
, ISA_MIPS32R2
);
5965 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5966 ctx
->base
.is_jmp
= DISAS_STOP
;
5970 goto cp0_unimplemented
;
5988 goto cp0_unimplemented
;
5994 gen_helper_mtc0_count(cpu_env
, arg
);
5997 /* 6,7 are implementation dependent */
5999 goto cp0_unimplemented
;
6005 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6009 goto cp0_unimplemented
;
6015 gen_helper_mtc0_compare(cpu_env
, arg
);
6018 /* 6,7 are implementation dependent */
6020 goto cp0_unimplemented
;
6026 save_cpu_state(ctx
, 1);
6027 gen_helper_mtc0_status(cpu_env
, arg
);
6028 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6029 gen_save_pc(ctx
->base
.pc_next
+ 4);
6030 ctx
->base
.is_jmp
= DISAS_EXIT
;
6034 check_insn(ctx
, ISA_MIPS32R2
);
6035 gen_helper_mtc0_intctl(cpu_env
, arg
);
6036 /* Stop translation as we may have switched the execution mode */
6037 ctx
->base
.is_jmp
= DISAS_STOP
;
6041 check_insn(ctx
, ISA_MIPS32R2
);
6042 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6043 /* Stop translation as we may have switched the execution mode */
6044 ctx
->base
.is_jmp
= DISAS_STOP
;
6048 check_insn(ctx
, ISA_MIPS32R2
);
6049 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6050 /* Stop translation as we may have switched the execution mode */
6051 ctx
->base
.is_jmp
= DISAS_STOP
;
6055 goto cp0_unimplemented
;
6061 save_cpu_state(ctx
, 1);
6062 gen_helper_mtc0_cause(cpu_env
, arg
);
6063 /* Stop translation as we may have triggered an interrupt.
6064 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6065 * translated code to check for pending interrupts. */
6066 gen_save_pc(ctx
->base
.pc_next
+ 4);
6067 ctx
->base
.is_jmp
= DISAS_EXIT
;
6071 goto cp0_unimplemented
;
6077 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6081 goto cp0_unimplemented
;
6091 check_insn(ctx
, ISA_MIPS32R2
);
6092 gen_helper_mtc0_ebase(cpu_env
, arg
);
6096 goto cp0_unimplemented
;
6102 gen_helper_mtc0_config0(cpu_env
, arg
);
6104 /* Stop translation as we may have switched the execution mode */
6105 ctx
->base
.is_jmp
= DISAS_STOP
;
6108 /* ignored, read only */
6112 gen_helper_mtc0_config2(cpu_env
, arg
);
6114 /* Stop translation as we may have switched the execution mode */
6115 ctx
->base
.is_jmp
= DISAS_STOP
;
6118 gen_helper_mtc0_config3(cpu_env
, arg
);
6120 /* Stop translation as we may have switched the execution mode */
6121 ctx
->base
.is_jmp
= DISAS_STOP
;
6124 gen_helper_mtc0_config4(cpu_env
, arg
);
6126 ctx
->base
.is_jmp
= DISAS_STOP
;
6129 gen_helper_mtc0_config5(cpu_env
, arg
);
6131 /* Stop translation as we may have switched the execution mode */
6132 ctx
->base
.is_jmp
= DISAS_STOP
;
6134 /* 6,7 are implementation dependent */
6144 rn
= "Invalid config selector";
6145 goto cp0_unimplemented
;
6151 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6155 CP0_CHECK(ctx
->mrp
);
6156 gen_helper_mtc0_maar(cpu_env
, arg
);
6160 CP0_CHECK(ctx
->mrp
);
6161 gen_helper_mtc0_maari(cpu_env
, arg
);
6165 goto cp0_unimplemented
;
6171 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6175 goto cp0_unimplemented
;
6181 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6185 goto cp0_unimplemented
;
6191 #if defined(TARGET_MIPS64)
6192 check_insn(ctx
, ISA_MIPS3
);
6193 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6198 goto cp0_unimplemented
;
6202 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6203 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6206 gen_helper_mtc0_framemask(cpu_env
, arg
);
6210 goto cp0_unimplemented
;
6215 rn
= "Diagnostic"; /* implementation dependent */
6220 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6221 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6222 gen_save_pc(ctx
->base
.pc_next
+ 4);
6223 ctx
->base
.is_jmp
= DISAS_EXIT
;
6227 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6228 rn
= "TraceControl";
6229 /* Stop translation as we may have switched the execution mode */
6230 ctx
->base
.is_jmp
= DISAS_STOP
;
6231 goto cp0_unimplemented
;
6233 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6234 rn
= "TraceControl2";
6235 /* Stop translation as we may have switched the execution mode */
6236 ctx
->base
.is_jmp
= DISAS_STOP
;
6237 goto cp0_unimplemented
;
6239 /* Stop translation as we may have switched the execution mode */
6240 ctx
->base
.is_jmp
= DISAS_STOP
;
6241 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6242 rn
= "UserTraceData";
6243 /* Stop translation as we may have switched the execution mode */
6244 ctx
->base
.is_jmp
= DISAS_STOP
;
6245 goto cp0_unimplemented
;
6247 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6248 /* Stop translation as we may have switched the execution mode */
6249 ctx
->base
.is_jmp
= DISAS_STOP
;
6251 goto cp0_unimplemented
;
6253 goto cp0_unimplemented
;
6260 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6264 goto cp0_unimplemented
;
6270 gen_helper_mtc0_performance0(cpu_env
, arg
);
6271 rn
= "Performance0";
6274 // gen_helper_mtc0_performance1(arg);
6275 rn
= "Performance1";
6276 goto cp0_unimplemented
;
6278 // gen_helper_mtc0_performance2(arg);
6279 rn
= "Performance2";
6280 goto cp0_unimplemented
;
6282 // gen_helper_mtc0_performance3(arg);
6283 rn
= "Performance3";
6284 goto cp0_unimplemented
;
6286 // gen_helper_mtc0_performance4(arg);
6287 rn
= "Performance4";
6288 goto cp0_unimplemented
;
6290 // gen_helper_mtc0_performance5(arg);
6291 rn
= "Performance5";
6292 goto cp0_unimplemented
;
6294 // gen_helper_mtc0_performance6(arg);
6295 rn
= "Performance6";
6296 goto cp0_unimplemented
;
6298 // gen_helper_mtc0_performance7(arg);
6299 rn
= "Performance7";
6300 goto cp0_unimplemented
;
6302 goto cp0_unimplemented
;
6308 gen_helper_mtc0_errctl(cpu_env
, arg
);
6309 ctx
->base
.is_jmp
= DISAS_STOP
;
6313 goto cp0_unimplemented
;
6323 goto cp0_unimplemented
;
6332 gen_helper_mtc0_taglo(cpu_env
, arg
);
6339 gen_helper_mtc0_datalo(cpu_env
, arg
);
6343 goto cp0_unimplemented
;
6352 gen_helper_mtc0_taghi(cpu_env
, arg
);
6359 gen_helper_mtc0_datahi(cpu_env
, arg
);
6364 goto cp0_unimplemented
;
6370 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6374 goto cp0_unimplemented
;
6381 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6385 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6386 tcg_gen_st_tl(arg
, cpu_env
,
6387 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6391 goto cp0_unimplemented
;
6395 goto cp0_unimplemented
;
6397 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6399 /* For simplicity assume that all writes can cause interrupts. */
6400 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6402 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6403 * translated code to check for pending interrupts. */
6404 gen_save_pc(ctx
->base
.pc_next
+ 4);
6405 ctx
->base
.is_jmp
= DISAS_EXIT
;
6410 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6413 #if defined(TARGET_MIPS64)
6414 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6416 const char *rn
= "invalid";
6419 check_insn(ctx
, ISA_MIPS64
);
6425 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6429 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6430 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6434 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6435 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6439 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6440 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6445 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6449 goto cp0_unimplemented
;
6455 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6456 gen_helper_mfc0_random(arg
, cpu_env
);
6460 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6461 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6465 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6466 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6470 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6475 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6476 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6480 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6481 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6485 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6486 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6487 rn
= "VPEScheFBack";
6490 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6495 goto cp0_unimplemented
;
6501 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6505 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6506 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6510 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6511 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6515 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6516 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6520 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6521 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6525 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6526 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6530 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6531 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6535 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6536 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6540 goto cp0_unimplemented
;
6546 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6551 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6552 rn
= "GlobalNumber";
6555 goto cp0_unimplemented
;
6561 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6565 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6566 rn
= "ContextConfig";
6567 goto cp0_unimplemented
;
6569 CP0_CHECK(ctx
->ulri
);
6570 tcg_gen_ld_tl(arg
, cpu_env
,
6571 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6575 goto cp0_unimplemented
;
6581 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6585 check_insn(ctx
, ISA_MIPS32R2
);
6586 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6591 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6596 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6601 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6605 goto cp0_unimplemented
;
6611 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6615 check_insn(ctx
, ISA_MIPS32R2
);
6616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6620 check_insn(ctx
, ISA_MIPS32R2
);
6621 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6625 check_insn(ctx
, ISA_MIPS32R2
);
6626 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6630 check_insn(ctx
, ISA_MIPS32R2
);
6631 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6635 check_insn(ctx
, ISA_MIPS32R2
);
6636 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6640 goto cp0_unimplemented
;
6646 check_insn(ctx
, ISA_MIPS32R2
);
6647 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6651 goto cp0_unimplemented
;
6657 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6667 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6671 goto cp0_unimplemented
;
6677 /* Mark as an IO operation because we read the time. */
6678 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6681 gen_helper_mfc0_count(arg
, cpu_env
);
6682 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6685 /* Break the TB to be able to take timer interrupts immediately
6686 after reading count. DISAS_STOP isn't sufficient, we need to
6687 ensure we break completely out of translated code. */
6688 gen_save_pc(ctx
->base
.pc_next
+ 4);
6689 ctx
->base
.is_jmp
= DISAS_EXIT
;
6692 /* 6,7 are implementation dependent */
6694 goto cp0_unimplemented
;
6700 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6704 goto cp0_unimplemented
;
6710 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6713 /* 6,7 are implementation dependent */
6715 goto cp0_unimplemented
;
6721 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6725 check_insn(ctx
, ISA_MIPS32R2
);
6726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6730 check_insn(ctx
, ISA_MIPS32R2
);
6731 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6735 check_insn(ctx
, ISA_MIPS32R2
);
6736 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6740 goto cp0_unimplemented
;
6746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6750 goto cp0_unimplemented
;
6756 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6760 goto cp0_unimplemented
;
6766 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6770 check_insn(ctx
, ISA_MIPS32R2
);
6771 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6775 check_insn(ctx
, ISA_MIPS32R2
);
6776 CP0_CHECK(ctx
->cmgcr
);
6777 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6781 goto cp0_unimplemented
;
6787 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6791 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6795 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6799 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6807 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6810 /* 6,7 are implementation dependent */
6812 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6816 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6820 goto cp0_unimplemented
;
6826 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6830 CP0_CHECK(ctx
->mrp
);
6831 gen_helper_dmfc0_maar(arg
, cpu_env
);
6835 CP0_CHECK(ctx
->mrp
);
6836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6840 goto cp0_unimplemented
;
6846 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6850 goto cp0_unimplemented
;
6856 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6860 goto cp0_unimplemented
;
6866 check_insn(ctx
, ISA_MIPS3
);
6867 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6871 goto cp0_unimplemented
;
6875 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6876 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6879 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6883 goto cp0_unimplemented
;
6887 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6888 rn
= "'Diagnostic"; /* implementation dependent */
6893 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6897 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6898 rn
= "TraceControl";
6899 goto cp0_unimplemented
;
6901 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6902 rn
= "TraceControl2";
6903 goto cp0_unimplemented
;
6905 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6906 rn
= "UserTraceData";
6907 goto cp0_unimplemented
;
6909 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6911 goto cp0_unimplemented
;
6913 goto cp0_unimplemented
;
6920 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6924 goto cp0_unimplemented
;
6930 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6931 rn
= "Performance0";
6934 // gen_helper_dmfc0_performance1(arg);
6935 rn
= "Performance1";
6936 goto cp0_unimplemented
;
6938 // gen_helper_dmfc0_performance2(arg);
6939 rn
= "Performance2";
6940 goto cp0_unimplemented
;
6942 // gen_helper_dmfc0_performance3(arg);
6943 rn
= "Performance3";
6944 goto cp0_unimplemented
;
6946 // gen_helper_dmfc0_performance4(arg);
6947 rn
= "Performance4";
6948 goto cp0_unimplemented
;
6950 // gen_helper_dmfc0_performance5(arg);
6951 rn
= "Performance5";
6952 goto cp0_unimplemented
;
6954 // gen_helper_dmfc0_performance6(arg);
6955 rn
= "Performance6";
6956 goto cp0_unimplemented
;
6958 // gen_helper_dmfc0_performance7(arg);
6959 rn
= "Performance7";
6960 goto cp0_unimplemented
;
6962 goto cp0_unimplemented
;
6968 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6972 goto cp0_unimplemented
;
6979 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6983 goto cp0_unimplemented
;
6992 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6999 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7003 goto cp0_unimplemented
;
7012 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7019 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7023 goto cp0_unimplemented
;
7029 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7033 goto cp0_unimplemented
;
7040 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7044 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7045 tcg_gen_ld_tl(arg
, cpu_env
,
7046 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7050 goto cp0_unimplemented
;
7054 goto cp0_unimplemented
;
7056 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
7060 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7061 gen_mfc0_unimplemented(ctx
, arg
);
7064 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7066 const char *rn
= "invalid";
7069 check_insn(ctx
, ISA_MIPS64
);
7071 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7079 gen_helper_mtc0_index(cpu_env
, arg
);
7083 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7084 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7088 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7093 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7103 goto cp0_unimplemented
;
7113 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7114 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7118 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7119 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7123 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7124 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7128 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7129 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7133 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7134 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7138 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7139 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7140 rn
= "VPEScheFBack";
7143 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7144 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7148 goto cp0_unimplemented
;
7154 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7158 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7159 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7163 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7164 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7168 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7169 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7173 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7174 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7178 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7179 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7183 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7184 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7188 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7189 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7193 goto cp0_unimplemented
;
7199 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7205 rn
= "GlobalNumber";
7208 goto cp0_unimplemented
;
7214 gen_helper_mtc0_context(cpu_env
, arg
);
7218 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7219 rn
= "ContextConfig";
7220 goto cp0_unimplemented
;
7222 CP0_CHECK(ctx
->ulri
);
7223 tcg_gen_st_tl(arg
, cpu_env
,
7224 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7228 goto cp0_unimplemented
;
7234 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7238 check_insn(ctx
, ISA_MIPS32R2
);
7239 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7244 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7249 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7254 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7258 goto cp0_unimplemented
;
7264 gen_helper_mtc0_wired(cpu_env
, arg
);
7268 check_insn(ctx
, ISA_MIPS32R2
);
7269 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7273 check_insn(ctx
, ISA_MIPS32R2
);
7274 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7278 check_insn(ctx
, ISA_MIPS32R2
);
7279 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7283 check_insn(ctx
, ISA_MIPS32R2
);
7284 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7288 check_insn(ctx
, ISA_MIPS32R2
);
7289 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7293 goto cp0_unimplemented
;
7299 check_insn(ctx
, ISA_MIPS32R2
);
7300 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7301 ctx
->base
.is_jmp
= DISAS_STOP
;
7305 goto cp0_unimplemented
;
7323 goto cp0_unimplemented
;
7329 gen_helper_mtc0_count(cpu_env
, arg
);
7332 /* 6,7 are implementation dependent */
7334 goto cp0_unimplemented
;
7336 /* Stop translation as we may have switched the execution mode */
7337 ctx
->base
.is_jmp
= DISAS_STOP
;
7342 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7346 goto cp0_unimplemented
;
7352 gen_helper_mtc0_compare(cpu_env
, arg
);
7355 /* 6,7 are implementation dependent */
7357 goto cp0_unimplemented
;
7359 /* Stop translation as we may have switched the execution mode */
7360 ctx
->base
.is_jmp
= DISAS_STOP
;
7365 save_cpu_state(ctx
, 1);
7366 gen_helper_mtc0_status(cpu_env
, arg
);
7367 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7368 gen_save_pc(ctx
->base
.pc_next
+ 4);
7369 ctx
->base
.is_jmp
= DISAS_EXIT
;
7373 check_insn(ctx
, ISA_MIPS32R2
);
7374 gen_helper_mtc0_intctl(cpu_env
, arg
);
7375 /* Stop translation as we may have switched the execution mode */
7376 ctx
->base
.is_jmp
= DISAS_STOP
;
7380 check_insn(ctx
, ISA_MIPS32R2
);
7381 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7382 /* Stop translation as we may have switched the execution mode */
7383 ctx
->base
.is_jmp
= DISAS_STOP
;
7387 check_insn(ctx
, ISA_MIPS32R2
);
7388 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7389 /* Stop translation as we may have switched the execution mode */
7390 ctx
->base
.is_jmp
= DISAS_STOP
;
7394 goto cp0_unimplemented
;
7400 save_cpu_state(ctx
, 1);
7401 gen_helper_mtc0_cause(cpu_env
, arg
);
7402 /* Stop translation as we may have triggered an interrupt.
7403 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7404 * translated code to check for pending interrupts. */
7405 gen_save_pc(ctx
->base
.pc_next
+ 4);
7406 ctx
->base
.is_jmp
= DISAS_EXIT
;
7410 goto cp0_unimplemented
;
7416 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7420 goto cp0_unimplemented
;
7430 check_insn(ctx
, ISA_MIPS32R2
);
7431 gen_helper_mtc0_ebase(cpu_env
, arg
);
7435 goto cp0_unimplemented
;
7441 gen_helper_mtc0_config0(cpu_env
, arg
);
7443 /* Stop translation as we may have switched the execution mode */
7444 ctx
->base
.is_jmp
= DISAS_STOP
;
7447 /* ignored, read only */
7451 gen_helper_mtc0_config2(cpu_env
, arg
);
7453 /* Stop translation as we may have switched the execution mode */
7454 ctx
->base
.is_jmp
= DISAS_STOP
;
7457 gen_helper_mtc0_config3(cpu_env
, arg
);
7459 /* Stop translation as we may have switched the execution mode */
7460 ctx
->base
.is_jmp
= DISAS_STOP
;
7463 /* currently ignored */
7467 gen_helper_mtc0_config5(cpu_env
, arg
);
7469 /* Stop translation as we may have switched the execution mode */
7470 ctx
->base
.is_jmp
= DISAS_STOP
;
7472 /* 6,7 are implementation dependent */
7474 rn
= "Invalid config selector";
7475 goto cp0_unimplemented
;
7481 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7485 CP0_CHECK(ctx
->mrp
);
7486 gen_helper_mtc0_maar(cpu_env
, arg
);
7490 CP0_CHECK(ctx
->mrp
);
7491 gen_helper_mtc0_maari(cpu_env
, arg
);
7495 goto cp0_unimplemented
;
7501 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7505 goto cp0_unimplemented
;
7511 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7515 goto cp0_unimplemented
;
7521 check_insn(ctx
, ISA_MIPS3
);
7522 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7526 goto cp0_unimplemented
;
7530 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7531 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7534 gen_helper_mtc0_framemask(cpu_env
, arg
);
7538 goto cp0_unimplemented
;
7543 rn
= "Diagnostic"; /* implementation dependent */
7548 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7549 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7550 gen_save_pc(ctx
->base
.pc_next
+ 4);
7551 ctx
->base
.is_jmp
= DISAS_EXIT
;
7555 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7556 /* Stop translation as we may have switched the execution mode */
7557 ctx
->base
.is_jmp
= DISAS_STOP
;
7558 rn
= "TraceControl";
7559 goto cp0_unimplemented
;
7561 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7562 /* Stop translation as we may have switched the execution mode */
7563 ctx
->base
.is_jmp
= DISAS_STOP
;
7564 rn
= "TraceControl2";
7565 goto cp0_unimplemented
;
7567 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7568 /* Stop translation as we may have switched the execution mode */
7569 ctx
->base
.is_jmp
= DISAS_STOP
;
7570 rn
= "UserTraceData";
7571 goto cp0_unimplemented
;
7573 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7574 /* Stop translation as we may have switched the execution mode */
7575 ctx
->base
.is_jmp
= DISAS_STOP
;
7577 goto cp0_unimplemented
;
7579 goto cp0_unimplemented
;
7586 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7590 goto cp0_unimplemented
;
7596 gen_helper_mtc0_performance0(cpu_env
, arg
);
7597 rn
= "Performance0";
7600 // gen_helper_mtc0_performance1(cpu_env, arg);
7601 rn
= "Performance1";
7602 goto cp0_unimplemented
;
7604 // gen_helper_mtc0_performance2(cpu_env, arg);
7605 rn
= "Performance2";
7606 goto cp0_unimplemented
;
7608 // gen_helper_mtc0_performance3(cpu_env, arg);
7609 rn
= "Performance3";
7610 goto cp0_unimplemented
;
7612 // gen_helper_mtc0_performance4(cpu_env, arg);
7613 rn
= "Performance4";
7614 goto cp0_unimplemented
;
7616 // gen_helper_mtc0_performance5(cpu_env, arg);
7617 rn
= "Performance5";
7618 goto cp0_unimplemented
;
7620 // gen_helper_mtc0_performance6(cpu_env, arg);
7621 rn
= "Performance6";
7622 goto cp0_unimplemented
;
7624 // gen_helper_mtc0_performance7(cpu_env, arg);
7625 rn
= "Performance7";
7626 goto cp0_unimplemented
;
7628 goto cp0_unimplemented
;
7634 gen_helper_mtc0_errctl(cpu_env
, arg
);
7635 ctx
->base
.is_jmp
= DISAS_STOP
;
7639 goto cp0_unimplemented
;
7649 goto cp0_unimplemented
;
7658 gen_helper_mtc0_taglo(cpu_env
, arg
);
7665 gen_helper_mtc0_datalo(cpu_env
, arg
);
7669 goto cp0_unimplemented
;
7678 gen_helper_mtc0_taghi(cpu_env
, arg
);
7685 gen_helper_mtc0_datahi(cpu_env
, arg
);
7690 goto cp0_unimplemented
;
7696 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7700 goto cp0_unimplemented
;
7707 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7711 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7712 tcg_gen_st_tl(arg
, cpu_env
,
7713 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7717 goto cp0_unimplemented
;
7721 goto cp0_unimplemented
;
7723 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
7725 /* For simplicity assume that all writes can cause interrupts. */
7726 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7728 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
7729 * translated code to check for pending interrupts. */
7730 gen_save_pc(ctx
->base
.pc_next
+ 4);
7731 ctx
->base
.is_jmp
= DISAS_EXIT
;
7736 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7738 #endif /* TARGET_MIPS64 */
7740 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7741 int u
, int sel
, int h
)
7743 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7744 TCGv t0
= tcg_temp_local_new();
7746 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7747 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7748 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7749 tcg_gen_movi_tl(t0
, -1);
7750 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7751 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7752 tcg_gen_movi_tl(t0
, -1);
7758 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7761 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7771 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7774 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7777 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7780 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7783 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7786 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7789 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7792 gen_mfc0(ctx
, t0
, rt
, sel
);
7799 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7802 gen_mfc0(ctx
, t0
, rt
, sel
);
7808 gen_helper_mftc0_status(t0
, cpu_env
);
7811 gen_mfc0(ctx
, t0
, rt
, sel
);
7817 gen_helper_mftc0_cause(t0
, cpu_env
);
7827 gen_helper_mftc0_epc(t0
, cpu_env
);
7837 gen_helper_mftc0_ebase(t0
, cpu_env
);
7847 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7857 gen_helper_mftc0_debug(t0
, cpu_env
);
7860 gen_mfc0(ctx
, t0
, rt
, sel
);
7865 gen_mfc0(ctx
, t0
, rt
, sel
);
7867 } else switch (sel
) {
7868 /* GPR registers. */
7870 gen_helper_1e0i(mftgpr
, t0
, rt
);
7872 /* Auxiliary CPU registers */
7876 gen_helper_1e0i(mftlo
, t0
, 0);
7879 gen_helper_1e0i(mfthi
, t0
, 0);
7882 gen_helper_1e0i(mftacx
, t0
, 0);
7885 gen_helper_1e0i(mftlo
, t0
, 1);
7888 gen_helper_1e0i(mfthi
, t0
, 1);
7891 gen_helper_1e0i(mftacx
, t0
, 1);
7894 gen_helper_1e0i(mftlo
, t0
, 2);
7897 gen_helper_1e0i(mfthi
, t0
, 2);
7900 gen_helper_1e0i(mftacx
, t0
, 2);
7903 gen_helper_1e0i(mftlo
, t0
, 3);
7906 gen_helper_1e0i(mfthi
, t0
, 3);
7909 gen_helper_1e0i(mftacx
, t0
, 3);
7912 gen_helper_mftdsp(t0
, cpu_env
);
7918 /* Floating point (COP1). */
7920 /* XXX: For now we support only a single FPU context. */
7922 TCGv_i32 fp0
= tcg_temp_new_i32();
7924 gen_load_fpr32(ctx
, fp0
, rt
);
7925 tcg_gen_ext_i32_tl(t0
, fp0
);
7926 tcg_temp_free_i32(fp0
);
7928 TCGv_i32 fp0
= tcg_temp_new_i32();
7930 gen_load_fpr32h(ctx
, fp0
, rt
);
7931 tcg_gen_ext_i32_tl(t0
, fp0
);
7932 tcg_temp_free_i32(fp0
);
7936 /* XXX: For now we support only a single FPU context. */
7937 gen_helper_1e0i(cfc1
, t0
, rt
);
7939 /* COP2: Not implemented. */
7946 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7947 gen_store_gpr(t0
, rd
);
7953 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7954 generate_exception_end(ctx
, EXCP_RI
);
7957 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7958 int u
, int sel
, int h
)
7960 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7961 TCGv t0
= tcg_temp_local_new();
7963 gen_load_gpr(t0
, rt
);
7964 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7965 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7966 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7968 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7969 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7976 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7979 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7989 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7992 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7995 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7998 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8001 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8004 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8007 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8010 gen_mtc0(ctx
, t0
, rd
, sel
);
8017 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8020 gen_mtc0(ctx
, t0
, rd
, sel
);
8026 gen_helper_mttc0_status(cpu_env
, t0
);
8029 gen_mtc0(ctx
, t0
, rd
, sel
);
8035 gen_helper_mttc0_cause(cpu_env
, t0
);
8045 gen_helper_mttc0_ebase(cpu_env
, t0
);
8055 gen_helper_mttc0_debug(cpu_env
, t0
);
8058 gen_mtc0(ctx
, t0
, rd
, sel
);
8063 gen_mtc0(ctx
, t0
, rd
, sel
);
8065 } else switch (sel
) {
8066 /* GPR registers. */
8068 gen_helper_0e1i(mttgpr
, t0
, rd
);
8070 /* Auxiliary CPU registers */
8074 gen_helper_0e1i(mttlo
, t0
, 0);
8077 gen_helper_0e1i(mtthi
, t0
, 0);
8080 gen_helper_0e1i(mttacx
, t0
, 0);
8083 gen_helper_0e1i(mttlo
, t0
, 1);
8086 gen_helper_0e1i(mtthi
, t0
, 1);
8089 gen_helper_0e1i(mttacx
, t0
, 1);
8092 gen_helper_0e1i(mttlo
, t0
, 2);
8095 gen_helper_0e1i(mtthi
, t0
, 2);
8098 gen_helper_0e1i(mttacx
, t0
, 2);
8101 gen_helper_0e1i(mttlo
, t0
, 3);
8104 gen_helper_0e1i(mtthi
, t0
, 3);
8107 gen_helper_0e1i(mttacx
, t0
, 3);
8110 gen_helper_mttdsp(cpu_env
, t0
);
8116 /* Floating point (COP1). */
8118 /* XXX: For now we support only a single FPU context. */
8120 TCGv_i32 fp0
= tcg_temp_new_i32();
8122 tcg_gen_trunc_tl_i32(fp0
, t0
);
8123 gen_store_fpr32(ctx
, fp0
, rd
);
8124 tcg_temp_free_i32(fp0
);
8126 TCGv_i32 fp0
= tcg_temp_new_i32();
8128 tcg_gen_trunc_tl_i32(fp0
, t0
);
8129 gen_store_fpr32h(ctx
, fp0
, rd
);
8130 tcg_temp_free_i32(fp0
);
8134 /* XXX: For now we support only a single FPU context. */
8136 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8138 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8139 tcg_temp_free_i32(fs_tmp
);
8141 /* Stop translation as we may have changed hflags */
8142 ctx
->base
.is_jmp
= DISAS_STOP
;
8144 /* COP2: Not implemented. */
8151 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8157 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8158 generate_exception_end(ctx
, EXCP_RI
);
8161 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8163 const char *opn
= "ldst";
8165 check_cp0_enabled(ctx
);
8172 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8177 TCGv t0
= tcg_temp_new();
8179 gen_load_gpr(t0
, rt
);
8180 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8185 #if defined(TARGET_MIPS64)
8187 check_insn(ctx
, ISA_MIPS3
);
8192 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8196 check_insn(ctx
, ISA_MIPS3
);
8198 TCGv t0
= tcg_temp_new();
8200 gen_load_gpr(t0
, rt
);
8201 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8213 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8219 TCGv t0
= tcg_temp_new();
8220 gen_load_gpr(t0
, rt
);
8221 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8227 check_insn(ctx
, ASE_MT
);
8232 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8233 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8237 check_insn(ctx
, ASE_MT
);
8238 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8239 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8244 if (!env
->tlb
->helper_tlbwi
)
8246 gen_helper_tlbwi(cpu_env
);
8251 if (!env
->tlb
->helper_tlbinv
) {
8254 gen_helper_tlbinv(cpu_env
);
8255 } /* treat as nop if TLBINV not supported */
8260 if (!env
->tlb
->helper_tlbinvf
) {
8263 gen_helper_tlbinvf(cpu_env
);
8264 } /* treat as nop if TLBINV not supported */
8268 if (!env
->tlb
->helper_tlbwr
)
8270 gen_helper_tlbwr(cpu_env
);
8274 if (!env
->tlb
->helper_tlbp
)
8276 gen_helper_tlbp(cpu_env
);
8280 if (!env
->tlb
->helper_tlbr
)
8282 gen_helper_tlbr(cpu_env
);
8284 case OPC_ERET
: /* OPC_ERETNC */
8285 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8286 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8289 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8290 if (ctx
->opcode
& (1 << bit_shift
)) {
8293 check_insn(ctx
, ISA_MIPS32R5
);
8294 gen_helper_eretnc(cpu_env
);
8298 check_insn(ctx
, ISA_MIPS2
);
8299 gen_helper_eret(cpu_env
);
8301 ctx
->base
.is_jmp
= DISAS_EXIT
;
8306 check_insn(ctx
, ISA_MIPS32
);
8307 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8308 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8311 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8313 generate_exception_end(ctx
, EXCP_RI
);
8315 gen_helper_deret(cpu_env
);
8316 ctx
->base
.is_jmp
= DISAS_EXIT
;
8321 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8322 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8323 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8326 /* If we get an exception, we want to restart at next instruction */
8327 ctx
->base
.pc_next
+= 4;
8328 save_cpu_state(ctx
, 1);
8329 ctx
->base
.pc_next
-= 4;
8330 gen_helper_wait(cpu_env
);
8331 ctx
->base
.is_jmp
= DISAS_NORETURN
;
8336 generate_exception_end(ctx
, EXCP_RI
);
8339 (void)opn
; /* avoid a compiler warning */
8341 #endif /* !CONFIG_USER_ONLY */
8343 /* CP1 Branches (before delay slot) */
8344 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8345 int32_t cc
, int32_t offset
)
8347 target_ulong btarget
;
8348 TCGv_i32 t0
= tcg_temp_new_i32();
8350 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8351 generate_exception_end(ctx
, EXCP_RI
);
8356 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8358 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
8362 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8363 tcg_gen_not_i32(t0
, t0
);
8364 tcg_gen_andi_i32(t0
, t0
, 1);
8365 tcg_gen_extu_i32_tl(bcond
, t0
);
8368 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8369 tcg_gen_not_i32(t0
, t0
);
8370 tcg_gen_andi_i32(t0
, t0
, 1);
8371 tcg_gen_extu_i32_tl(bcond
, t0
);
8374 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8375 tcg_gen_andi_i32(t0
, t0
, 1);
8376 tcg_gen_extu_i32_tl(bcond
, t0
);
8379 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8380 tcg_gen_andi_i32(t0
, t0
, 1);
8381 tcg_gen_extu_i32_tl(bcond
, t0
);
8383 ctx
->hflags
|= MIPS_HFLAG_BL
;
8387 TCGv_i32 t1
= tcg_temp_new_i32();
8388 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8389 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8390 tcg_gen_nand_i32(t0
, t0
, t1
);
8391 tcg_temp_free_i32(t1
);
8392 tcg_gen_andi_i32(t0
, t0
, 1);
8393 tcg_gen_extu_i32_tl(bcond
, t0
);
8398 TCGv_i32 t1
= tcg_temp_new_i32();
8399 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8400 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8401 tcg_gen_or_i32(t0
, t0
, t1
);
8402 tcg_temp_free_i32(t1
);
8403 tcg_gen_andi_i32(t0
, t0
, 1);
8404 tcg_gen_extu_i32_tl(bcond
, t0
);
8409 TCGv_i32 t1
= tcg_temp_new_i32();
8410 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8411 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8412 tcg_gen_and_i32(t0
, t0
, t1
);
8413 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8414 tcg_gen_and_i32(t0
, t0
, t1
);
8415 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8416 tcg_gen_nand_i32(t0
, t0
, t1
);
8417 tcg_temp_free_i32(t1
);
8418 tcg_gen_andi_i32(t0
, t0
, 1);
8419 tcg_gen_extu_i32_tl(bcond
, t0
);
8424 TCGv_i32 t1
= tcg_temp_new_i32();
8425 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8426 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8427 tcg_gen_or_i32(t0
, t0
, t1
);
8428 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8429 tcg_gen_or_i32(t0
, t0
, t1
);
8430 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8431 tcg_gen_or_i32(t0
, t0
, t1
);
8432 tcg_temp_free_i32(t1
);
8433 tcg_gen_andi_i32(t0
, t0
, 1);
8434 tcg_gen_extu_i32_tl(bcond
, t0
);
8437 ctx
->hflags
|= MIPS_HFLAG_BC
;
8440 MIPS_INVAL("cp1 cond branch");
8441 generate_exception_end(ctx
, EXCP_RI
);
8444 ctx
->btarget
= btarget
;
8445 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8447 tcg_temp_free_i32(t0
);
8450 /* R6 CP1 Branches */
8451 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8452 int32_t ft
, int32_t offset
,
8455 target_ulong btarget
;
8456 TCGv_i64 t0
= tcg_temp_new_i64();
8458 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8459 #ifdef MIPS_DEBUG_DISAS
8460 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8461 "\n", ctx
->base
.pc_next
);
8463 generate_exception_end(ctx
, EXCP_RI
);
8467 gen_load_fpr64(ctx
, t0
, ft
);
8468 tcg_gen_andi_i64(t0
, t0
, 1);
8470 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
8474 tcg_gen_xori_i64(t0
, t0
, 1);
8475 ctx
->hflags
|= MIPS_HFLAG_BC
;
8478 /* t0 already set */
8479 ctx
->hflags
|= MIPS_HFLAG_BC
;
8482 MIPS_INVAL("cp1 cond branch");
8483 generate_exception_end(ctx
, EXCP_RI
);
8487 tcg_gen_trunc_i64_tl(bcond
, t0
);
8489 ctx
->btarget
= btarget
;
8491 switch (delayslot_size
) {
8493 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8496 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8501 tcg_temp_free_i64(t0
);
8504 /* Coprocessor 1 (FPU) */
8506 #define FOP(func, fmt) (((fmt) << 21) | (func))
8509 OPC_ADD_S
= FOP(0, FMT_S
),
8510 OPC_SUB_S
= FOP(1, FMT_S
),
8511 OPC_MUL_S
= FOP(2, FMT_S
),
8512 OPC_DIV_S
= FOP(3, FMT_S
),
8513 OPC_SQRT_S
= FOP(4, FMT_S
),
8514 OPC_ABS_S
= FOP(5, FMT_S
),
8515 OPC_MOV_S
= FOP(6, FMT_S
),
8516 OPC_NEG_S
= FOP(7, FMT_S
),
8517 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8518 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8519 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8520 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8521 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8522 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8523 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8524 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8525 OPC_SEL_S
= FOP(16, FMT_S
),
8526 OPC_MOVCF_S
= FOP(17, FMT_S
),
8527 OPC_MOVZ_S
= FOP(18, FMT_S
),
8528 OPC_MOVN_S
= FOP(19, FMT_S
),
8529 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8530 OPC_RECIP_S
= FOP(21, FMT_S
),
8531 OPC_RSQRT_S
= FOP(22, FMT_S
),
8532 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8533 OPC_MADDF_S
= FOP(24, FMT_S
),
8534 OPC_MSUBF_S
= FOP(25, FMT_S
),
8535 OPC_RINT_S
= FOP(26, FMT_S
),
8536 OPC_CLASS_S
= FOP(27, FMT_S
),
8537 OPC_MIN_S
= FOP(28, FMT_S
),
8538 OPC_RECIP2_S
= FOP(28, FMT_S
),
8539 OPC_MINA_S
= FOP(29, FMT_S
),
8540 OPC_RECIP1_S
= FOP(29, FMT_S
),
8541 OPC_MAX_S
= FOP(30, FMT_S
),
8542 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8543 OPC_MAXA_S
= FOP(31, FMT_S
),
8544 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8545 OPC_CVT_D_S
= FOP(33, FMT_S
),
8546 OPC_CVT_W_S
= FOP(36, FMT_S
),
8547 OPC_CVT_L_S
= FOP(37, FMT_S
),
8548 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8549 OPC_CMP_F_S
= FOP (48, FMT_S
),
8550 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8551 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8552 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8553 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8554 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8555 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8556 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8557 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8558 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8559 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8560 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8561 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8562 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8563 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8564 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8566 OPC_ADD_D
= FOP(0, FMT_D
),
8567 OPC_SUB_D
= FOP(1, FMT_D
),
8568 OPC_MUL_D
= FOP(2, FMT_D
),
8569 OPC_DIV_D
= FOP(3, FMT_D
),
8570 OPC_SQRT_D
= FOP(4, FMT_D
),
8571 OPC_ABS_D
= FOP(5, FMT_D
),
8572 OPC_MOV_D
= FOP(6, FMT_D
),
8573 OPC_NEG_D
= FOP(7, FMT_D
),
8574 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8575 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8576 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8577 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8578 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8579 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8580 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8581 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8582 OPC_SEL_D
= FOP(16, FMT_D
),
8583 OPC_MOVCF_D
= FOP(17, FMT_D
),
8584 OPC_MOVZ_D
= FOP(18, FMT_D
),
8585 OPC_MOVN_D
= FOP(19, FMT_D
),
8586 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8587 OPC_RECIP_D
= FOP(21, FMT_D
),
8588 OPC_RSQRT_D
= FOP(22, FMT_D
),
8589 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8590 OPC_MADDF_D
= FOP(24, FMT_D
),
8591 OPC_MSUBF_D
= FOP(25, FMT_D
),
8592 OPC_RINT_D
= FOP(26, FMT_D
),
8593 OPC_CLASS_D
= FOP(27, FMT_D
),
8594 OPC_MIN_D
= FOP(28, FMT_D
),
8595 OPC_RECIP2_D
= FOP(28, FMT_D
),
8596 OPC_MINA_D
= FOP(29, FMT_D
),
8597 OPC_RECIP1_D
= FOP(29, FMT_D
),
8598 OPC_MAX_D
= FOP(30, FMT_D
),
8599 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8600 OPC_MAXA_D
= FOP(31, FMT_D
),
8601 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8602 OPC_CVT_S_D
= FOP(32, FMT_D
),
8603 OPC_CVT_W_D
= FOP(36, FMT_D
),
8604 OPC_CVT_L_D
= FOP(37, FMT_D
),
8605 OPC_CMP_F_D
= FOP (48, FMT_D
),
8606 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8607 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8608 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8609 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8610 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8611 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8612 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8613 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8614 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8615 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8616 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8617 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8618 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8619 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8620 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8622 OPC_CVT_S_W
= FOP(32, FMT_W
),
8623 OPC_CVT_D_W
= FOP(33, FMT_W
),
8624 OPC_CVT_S_L
= FOP(32, FMT_L
),
8625 OPC_CVT_D_L
= FOP(33, FMT_L
),
8626 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8628 OPC_ADD_PS
= FOP(0, FMT_PS
),
8629 OPC_SUB_PS
= FOP(1, FMT_PS
),
8630 OPC_MUL_PS
= FOP(2, FMT_PS
),
8631 OPC_DIV_PS
= FOP(3, FMT_PS
),
8632 OPC_ABS_PS
= FOP(5, FMT_PS
),
8633 OPC_MOV_PS
= FOP(6, FMT_PS
),
8634 OPC_NEG_PS
= FOP(7, FMT_PS
),
8635 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8636 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8637 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8638 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8639 OPC_MULR_PS
= FOP(26, FMT_PS
),
8640 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8641 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8642 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8643 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8645 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8646 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8647 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8648 OPC_PLL_PS
= FOP(44, FMT_PS
),
8649 OPC_PLU_PS
= FOP(45, FMT_PS
),
8650 OPC_PUL_PS
= FOP(46, FMT_PS
),
8651 OPC_PUU_PS
= FOP(47, FMT_PS
),
8652 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8653 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8654 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8655 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8656 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8657 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8658 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8659 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8660 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8661 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8662 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8663 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8664 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8665 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8666 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8667 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8671 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8672 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8673 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8674 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8675 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8676 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8677 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8678 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8679 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8680 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8681 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8682 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8683 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8684 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8685 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8686 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8687 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8688 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8689 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8690 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8691 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8692 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8694 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8695 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8696 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8697 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8698 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8699 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8700 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8701 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8702 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8703 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8704 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8705 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8706 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8707 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8708 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8709 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8710 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8711 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8712 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8713 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8714 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8715 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8717 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8719 TCGv t0
= tcg_temp_new();
8724 TCGv_i32 fp0
= tcg_temp_new_i32();
8726 gen_load_fpr32(ctx
, fp0
, fs
);
8727 tcg_gen_ext_i32_tl(t0
, fp0
);
8728 tcg_temp_free_i32(fp0
);
8730 gen_store_gpr(t0
, rt
);
8733 gen_load_gpr(t0
, rt
);
8735 TCGv_i32 fp0
= tcg_temp_new_i32();
8737 tcg_gen_trunc_tl_i32(fp0
, t0
);
8738 gen_store_fpr32(ctx
, fp0
, fs
);
8739 tcg_temp_free_i32(fp0
);
8743 gen_helper_1e0i(cfc1
, t0
, fs
);
8744 gen_store_gpr(t0
, rt
);
8747 gen_load_gpr(t0
, rt
);
8748 save_cpu_state(ctx
, 0);
8750 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8752 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8753 tcg_temp_free_i32(fs_tmp
);
8755 /* Stop translation as we may have changed hflags */
8756 ctx
->base
.is_jmp
= DISAS_STOP
;
8758 #if defined(TARGET_MIPS64)
8760 gen_load_fpr64(ctx
, t0
, fs
);
8761 gen_store_gpr(t0
, rt
);
8764 gen_load_gpr(t0
, rt
);
8765 gen_store_fpr64(ctx
, t0
, fs
);
8770 TCGv_i32 fp0
= tcg_temp_new_i32();
8772 gen_load_fpr32h(ctx
, fp0
, fs
);
8773 tcg_gen_ext_i32_tl(t0
, fp0
);
8774 tcg_temp_free_i32(fp0
);
8776 gen_store_gpr(t0
, rt
);
8779 gen_load_gpr(t0
, rt
);
8781 TCGv_i32 fp0
= tcg_temp_new_i32();
8783 tcg_gen_trunc_tl_i32(fp0
, t0
);
8784 gen_store_fpr32h(ctx
, fp0
, fs
);
8785 tcg_temp_free_i32(fp0
);
8789 MIPS_INVAL("cp1 move");
8790 generate_exception_end(ctx
, EXCP_RI
);
8798 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8814 l1
= gen_new_label();
8815 t0
= tcg_temp_new_i32();
8816 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8817 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8818 tcg_temp_free_i32(t0
);
8820 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8822 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8827 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8831 TCGv_i32 t0
= tcg_temp_new_i32();
8832 TCGLabel
*l1
= gen_new_label();
8839 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8840 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8841 gen_load_fpr32(ctx
, t0
, fs
);
8842 gen_store_fpr32(ctx
, t0
, fd
);
8844 tcg_temp_free_i32(t0
);
8847 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8850 TCGv_i32 t0
= tcg_temp_new_i32();
8852 TCGLabel
*l1
= gen_new_label();
8859 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8860 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8861 tcg_temp_free_i32(t0
);
8862 fp0
= tcg_temp_new_i64();
8863 gen_load_fpr64(ctx
, fp0
, fs
);
8864 gen_store_fpr64(ctx
, fp0
, fd
);
8865 tcg_temp_free_i64(fp0
);
8869 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8873 TCGv_i32 t0
= tcg_temp_new_i32();
8874 TCGLabel
*l1
= gen_new_label();
8875 TCGLabel
*l2
= gen_new_label();
8882 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8883 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8884 gen_load_fpr32(ctx
, t0
, fs
);
8885 gen_store_fpr32(ctx
, t0
, fd
);
8888 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8889 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8890 gen_load_fpr32h(ctx
, t0
, fs
);
8891 gen_store_fpr32h(ctx
, t0
, fd
);
8892 tcg_temp_free_i32(t0
);
8896 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8899 TCGv_i32 t1
= tcg_const_i32(0);
8900 TCGv_i32 fp0
= tcg_temp_new_i32();
8901 TCGv_i32 fp1
= tcg_temp_new_i32();
8902 TCGv_i32 fp2
= tcg_temp_new_i32();
8903 gen_load_fpr32(ctx
, fp0
, fd
);
8904 gen_load_fpr32(ctx
, fp1
, ft
);
8905 gen_load_fpr32(ctx
, fp2
, fs
);
8909 tcg_gen_andi_i32(fp0
, fp0
, 1);
8910 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8913 tcg_gen_andi_i32(fp1
, fp1
, 1);
8914 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8917 tcg_gen_andi_i32(fp1
, fp1
, 1);
8918 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8921 MIPS_INVAL("gen_sel_s");
8922 generate_exception_end(ctx
, EXCP_RI
);
8926 gen_store_fpr32(ctx
, fp0
, fd
);
8927 tcg_temp_free_i32(fp2
);
8928 tcg_temp_free_i32(fp1
);
8929 tcg_temp_free_i32(fp0
);
8930 tcg_temp_free_i32(t1
);
8933 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8936 TCGv_i64 t1
= tcg_const_i64(0);
8937 TCGv_i64 fp0
= tcg_temp_new_i64();
8938 TCGv_i64 fp1
= tcg_temp_new_i64();
8939 TCGv_i64 fp2
= tcg_temp_new_i64();
8940 gen_load_fpr64(ctx
, fp0
, fd
);
8941 gen_load_fpr64(ctx
, fp1
, ft
);
8942 gen_load_fpr64(ctx
, fp2
, fs
);
8946 tcg_gen_andi_i64(fp0
, fp0
, 1);
8947 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8950 tcg_gen_andi_i64(fp1
, fp1
, 1);
8951 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8954 tcg_gen_andi_i64(fp1
, fp1
, 1);
8955 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8958 MIPS_INVAL("gen_sel_d");
8959 generate_exception_end(ctx
, EXCP_RI
);
8963 gen_store_fpr64(ctx
, fp0
, fd
);
8964 tcg_temp_free_i64(fp2
);
8965 tcg_temp_free_i64(fp1
);
8966 tcg_temp_free_i64(fp0
);
8967 tcg_temp_free_i64(t1
);
8970 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8971 int ft
, int fs
, int fd
, int cc
)
8973 uint32_t func
= ctx
->opcode
& 0x3f;
8977 TCGv_i32 fp0
= tcg_temp_new_i32();
8978 TCGv_i32 fp1
= tcg_temp_new_i32();
8980 gen_load_fpr32(ctx
, fp0
, fs
);
8981 gen_load_fpr32(ctx
, fp1
, ft
);
8982 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8983 tcg_temp_free_i32(fp1
);
8984 gen_store_fpr32(ctx
, fp0
, fd
);
8985 tcg_temp_free_i32(fp0
);
8990 TCGv_i32 fp0
= tcg_temp_new_i32();
8991 TCGv_i32 fp1
= tcg_temp_new_i32();
8993 gen_load_fpr32(ctx
, fp0
, fs
);
8994 gen_load_fpr32(ctx
, fp1
, ft
);
8995 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8996 tcg_temp_free_i32(fp1
);
8997 gen_store_fpr32(ctx
, fp0
, fd
);
8998 tcg_temp_free_i32(fp0
);
9003 TCGv_i32 fp0
= tcg_temp_new_i32();
9004 TCGv_i32 fp1
= tcg_temp_new_i32();
9006 gen_load_fpr32(ctx
, fp0
, fs
);
9007 gen_load_fpr32(ctx
, fp1
, ft
);
9008 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9009 tcg_temp_free_i32(fp1
);
9010 gen_store_fpr32(ctx
, fp0
, fd
);
9011 tcg_temp_free_i32(fp0
);
9016 TCGv_i32 fp0
= tcg_temp_new_i32();
9017 TCGv_i32 fp1
= tcg_temp_new_i32();
9019 gen_load_fpr32(ctx
, fp0
, fs
);
9020 gen_load_fpr32(ctx
, fp1
, ft
);
9021 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9022 tcg_temp_free_i32(fp1
);
9023 gen_store_fpr32(ctx
, fp0
, fd
);
9024 tcg_temp_free_i32(fp0
);
9029 TCGv_i32 fp0
= tcg_temp_new_i32();
9031 gen_load_fpr32(ctx
, fp0
, fs
);
9032 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9033 gen_store_fpr32(ctx
, fp0
, fd
);
9034 tcg_temp_free_i32(fp0
);
9039 TCGv_i32 fp0
= tcg_temp_new_i32();
9041 gen_load_fpr32(ctx
, fp0
, fs
);
9043 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9045 gen_helper_float_abs_s(fp0
, fp0
);
9047 gen_store_fpr32(ctx
, fp0
, fd
);
9048 tcg_temp_free_i32(fp0
);
9053 TCGv_i32 fp0
= tcg_temp_new_i32();
9055 gen_load_fpr32(ctx
, fp0
, fs
);
9056 gen_store_fpr32(ctx
, fp0
, fd
);
9057 tcg_temp_free_i32(fp0
);
9062 TCGv_i32 fp0
= tcg_temp_new_i32();
9064 gen_load_fpr32(ctx
, fp0
, fs
);
9066 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9068 gen_helper_float_chs_s(fp0
, fp0
);
9070 gen_store_fpr32(ctx
, fp0
, fd
);
9071 tcg_temp_free_i32(fp0
);
9075 check_cp1_64bitmode(ctx
);
9077 TCGv_i32 fp32
= tcg_temp_new_i32();
9078 TCGv_i64 fp64
= tcg_temp_new_i64();
9080 gen_load_fpr32(ctx
, fp32
, fs
);
9082 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9084 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9086 tcg_temp_free_i32(fp32
);
9087 gen_store_fpr64(ctx
, fp64
, fd
);
9088 tcg_temp_free_i64(fp64
);
9092 check_cp1_64bitmode(ctx
);
9094 TCGv_i32 fp32
= tcg_temp_new_i32();
9095 TCGv_i64 fp64
= tcg_temp_new_i64();
9097 gen_load_fpr32(ctx
, fp32
, fs
);
9099 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9101 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9103 tcg_temp_free_i32(fp32
);
9104 gen_store_fpr64(ctx
, fp64
, fd
);
9105 tcg_temp_free_i64(fp64
);
9109 check_cp1_64bitmode(ctx
);
9111 TCGv_i32 fp32
= tcg_temp_new_i32();
9112 TCGv_i64 fp64
= tcg_temp_new_i64();
9114 gen_load_fpr32(ctx
, fp32
, fs
);
9116 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9118 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9120 tcg_temp_free_i32(fp32
);
9121 gen_store_fpr64(ctx
, fp64
, fd
);
9122 tcg_temp_free_i64(fp64
);
9126 check_cp1_64bitmode(ctx
);
9128 TCGv_i32 fp32
= tcg_temp_new_i32();
9129 TCGv_i64 fp64
= tcg_temp_new_i64();
9131 gen_load_fpr32(ctx
, fp32
, fs
);
9133 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9135 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9137 tcg_temp_free_i32(fp32
);
9138 gen_store_fpr64(ctx
, fp64
, fd
);
9139 tcg_temp_free_i64(fp64
);
9144 TCGv_i32 fp0
= tcg_temp_new_i32();
9146 gen_load_fpr32(ctx
, fp0
, fs
);
9148 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9150 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9152 gen_store_fpr32(ctx
, fp0
, fd
);
9153 tcg_temp_free_i32(fp0
);
9158 TCGv_i32 fp0
= tcg_temp_new_i32();
9160 gen_load_fpr32(ctx
, fp0
, fs
);
9162 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9164 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9166 gen_store_fpr32(ctx
, fp0
, fd
);
9167 tcg_temp_free_i32(fp0
);
9172 TCGv_i32 fp0
= tcg_temp_new_i32();
9174 gen_load_fpr32(ctx
, fp0
, fs
);
9176 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9178 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9180 gen_store_fpr32(ctx
, fp0
, fd
);
9181 tcg_temp_free_i32(fp0
);
9186 TCGv_i32 fp0
= tcg_temp_new_i32();
9188 gen_load_fpr32(ctx
, fp0
, fs
);
9190 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9192 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9194 gen_store_fpr32(ctx
, fp0
, fd
);
9195 tcg_temp_free_i32(fp0
);
9199 check_insn(ctx
, ISA_MIPS32R6
);
9200 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9203 check_insn(ctx
, ISA_MIPS32R6
);
9204 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9207 check_insn(ctx
, ISA_MIPS32R6
);
9208 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9212 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9217 TCGLabel
*l1
= gen_new_label();
9221 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9223 fp0
= tcg_temp_new_i32();
9224 gen_load_fpr32(ctx
, fp0
, fs
);
9225 gen_store_fpr32(ctx
, fp0
, fd
);
9226 tcg_temp_free_i32(fp0
);
9231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9233 TCGLabel
*l1
= gen_new_label();
9237 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9238 fp0
= tcg_temp_new_i32();
9239 gen_load_fpr32(ctx
, fp0
, fs
);
9240 gen_store_fpr32(ctx
, fp0
, fd
);
9241 tcg_temp_free_i32(fp0
);
9248 TCGv_i32 fp0
= tcg_temp_new_i32();
9250 gen_load_fpr32(ctx
, fp0
, fs
);
9251 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9252 gen_store_fpr32(ctx
, fp0
, fd
);
9253 tcg_temp_free_i32(fp0
);
9258 TCGv_i32 fp0
= tcg_temp_new_i32();
9260 gen_load_fpr32(ctx
, fp0
, fs
);
9261 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9262 gen_store_fpr32(ctx
, fp0
, fd
);
9263 tcg_temp_free_i32(fp0
);
9267 check_insn(ctx
, ISA_MIPS32R6
);
9269 TCGv_i32 fp0
= tcg_temp_new_i32();
9270 TCGv_i32 fp1
= tcg_temp_new_i32();
9271 TCGv_i32 fp2
= tcg_temp_new_i32();
9272 gen_load_fpr32(ctx
, fp0
, fs
);
9273 gen_load_fpr32(ctx
, fp1
, ft
);
9274 gen_load_fpr32(ctx
, fp2
, fd
);
9275 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9276 gen_store_fpr32(ctx
, fp2
, fd
);
9277 tcg_temp_free_i32(fp2
);
9278 tcg_temp_free_i32(fp1
);
9279 tcg_temp_free_i32(fp0
);
9283 check_insn(ctx
, ISA_MIPS32R6
);
9285 TCGv_i32 fp0
= tcg_temp_new_i32();
9286 TCGv_i32 fp1
= tcg_temp_new_i32();
9287 TCGv_i32 fp2
= tcg_temp_new_i32();
9288 gen_load_fpr32(ctx
, fp0
, fs
);
9289 gen_load_fpr32(ctx
, fp1
, ft
);
9290 gen_load_fpr32(ctx
, fp2
, fd
);
9291 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9292 gen_store_fpr32(ctx
, fp2
, fd
);
9293 tcg_temp_free_i32(fp2
);
9294 tcg_temp_free_i32(fp1
);
9295 tcg_temp_free_i32(fp0
);
9299 check_insn(ctx
, ISA_MIPS32R6
);
9301 TCGv_i32 fp0
= tcg_temp_new_i32();
9302 gen_load_fpr32(ctx
, fp0
, fs
);
9303 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9304 gen_store_fpr32(ctx
, fp0
, fd
);
9305 tcg_temp_free_i32(fp0
);
9309 check_insn(ctx
, ISA_MIPS32R6
);
9311 TCGv_i32 fp0
= tcg_temp_new_i32();
9312 gen_load_fpr32(ctx
, fp0
, fs
);
9313 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9314 gen_store_fpr32(ctx
, fp0
, fd
);
9315 tcg_temp_free_i32(fp0
);
9318 case OPC_MIN_S
: /* OPC_RECIP2_S */
9319 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9321 TCGv_i32 fp0
= tcg_temp_new_i32();
9322 TCGv_i32 fp1
= tcg_temp_new_i32();
9323 TCGv_i32 fp2
= tcg_temp_new_i32();
9324 gen_load_fpr32(ctx
, fp0
, fs
);
9325 gen_load_fpr32(ctx
, fp1
, ft
);
9326 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9327 gen_store_fpr32(ctx
, fp2
, fd
);
9328 tcg_temp_free_i32(fp2
);
9329 tcg_temp_free_i32(fp1
);
9330 tcg_temp_free_i32(fp0
);
9333 check_cp1_64bitmode(ctx
);
9335 TCGv_i32 fp0
= tcg_temp_new_i32();
9336 TCGv_i32 fp1
= tcg_temp_new_i32();
9338 gen_load_fpr32(ctx
, fp0
, fs
);
9339 gen_load_fpr32(ctx
, fp1
, ft
);
9340 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9341 tcg_temp_free_i32(fp1
);
9342 gen_store_fpr32(ctx
, fp0
, fd
);
9343 tcg_temp_free_i32(fp0
);
9347 case OPC_MINA_S
: /* OPC_RECIP1_S */
9348 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9350 TCGv_i32 fp0
= tcg_temp_new_i32();
9351 TCGv_i32 fp1
= tcg_temp_new_i32();
9352 TCGv_i32 fp2
= tcg_temp_new_i32();
9353 gen_load_fpr32(ctx
, fp0
, fs
);
9354 gen_load_fpr32(ctx
, fp1
, ft
);
9355 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9356 gen_store_fpr32(ctx
, fp2
, fd
);
9357 tcg_temp_free_i32(fp2
);
9358 tcg_temp_free_i32(fp1
);
9359 tcg_temp_free_i32(fp0
);
9362 check_cp1_64bitmode(ctx
);
9364 TCGv_i32 fp0
= tcg_temp_new_i32();
9366 gen_load_fpr32(ctx
, fp0
, fs
);
9367 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9368 gen_store_fpr32(ctx
, fp0
, fd
);
9369 tcg_temp_free_i32(fp0
);
9373 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9374 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9376 TCGv_i32 fp0
= tcg_temp_new_i32();
9377 TCGv_i32 fp1
= tcg_temp_new_i32();
9378 gen_load_fpr32(ctx
, fp0
, fs
);
9379 gen_load_fpr32(ctx
, fp1
, ft
);
9380 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9381 gen_store_fpr32(ctx
, fp1
, fd
);
9382 tcg_temp_free_i32(fp1
);
9383 tcg_temp_free_i32(fp0
);
9386 check_cp1_64bitmode(ctx
);
9388 TCGv_i32 fp0
= tcg_temp_new_i32();
9390 gen_load_fpr32(ctx
, fp0
, fs
);
9391 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9392 gen_store_fpr32(ctx
, fp0
, fd
);
9393 tcg_temp_free_i32(fp0
);
9397 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9398 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9400 TCGv_i32 fp0
= tcg_temp_new_i32();
9401 TCGv_i32 fp1
= tcg_temp_new_i32();
9402 gen_load_fpr32(ctx
, fp0
, fs
);
9403 gen_load_fpr32(ctx
, fp1
, ft
);
9404 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9405 gen_store_fpr32(ctx
, fp1
, fd
);
9406 tcg_temp_free_i32(fp1
);
9407 tcg_temp_free_i32(fp0
);
9410 check_cp1_64bitmode(ctx
);
9412 TCGv_i32 fp0
= tcg_temp_new_i32();
9413 TCGv_i32 fp1
= tcg_temp_new_i32();
9415 gen_load_fpr32(ctx
, fp0
, fs
);
9416 gen_load_fpr32(ctx
, fp1
, ft
);
9417 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9418 tcg_temp_free_i32(fp1
);
9419 gen_store_fpr32(ctx
, fp0
, fd
);
9420 tcg_temp_free_i32(fp0
);
9425 check_cp1_registers(ctx
, fd
);
9427 TCGv_i32 fp32
= tcg_temp_new_i32();
9428 TCGv_i64 fp64
= tcg_temp_new_i64();
9430 gen_load_fpr32(ctx
, fp32
, fs
);
9431 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9432 tcg_temp_free_i32(fp32
);
9433 gen_store_fpr64(ctx
, fp64
, fd
);
9434 tcg_temp_free_i64(fp64
);
9439 TCGv_i32 fp0
= tcg_temp_new_i32();
9441 gen_load_fpr32(ctx
, fp0
, fs
);
9443 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9445 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9447 gen_store_fpr32(ctx
, fp0
, fd
);
9448 tcg_temp_free_i32(fp0
);
9452 check_cp1_64bitmode(ctx
);
9454 TCGv_i32 fp32
= tcg_temp_new_i32();
9455 TCGv_i64 fp64
= tcg_temp_new_i64();
9457 gen_load_fpr32(ctx
, fp32
, fs
);
9459 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9461 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9463 tcg_temp_free_i32(fp32
);
9464 gen_store_fpr64(ctx
, fp64
, fd
);
9465 tcg_temp_free_i64(fp64
);
9471 TCGv_i64 fp64
= tcg_temp_new_i64();
9472 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9473 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9475 gen_load_fpr32(ctx
, fp32_0
, fs
);
9476 gen_load_fpr32(ctx
, fp32_1
, ft
);
9477 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9478 tcg_temp_free_i32(fp32_1
);
9479 tcg_temp_free_i32(fp32_0
);
9480 gen_store_fpr64(ctx
, fp64
, fd
);
9481 tcg_temp_free_i64(fp64
);
9493 case OPC_CMP_NGLE_S
:
9500 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9501 if (ctx
->opcode
& (1 << 6)) {
9502 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9504 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9508 check_cp1_registers(ctx
, fs
| ft
| fd
);
9510 TCGv_i64 fp0
= tcg_temp_new_i64();
9511 TCGv_i64 fp1
= tcg_temp_new_i64();
9513 gen_load_fpr64(ctx
, fp0
, fs
);
9514 gen_load_fpr64(ctx
, fp1
, ft
);
9515 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9516 tcg_temp_free_i64(fp1
);
9517 gen_store_fpr64(ctx
, fp0
, fd
);
9518 tcg_temp_free_i64(fp0
);
9522 check_cp1_registers(ctx
, fs
| ft
| fd
);
9524 TCGv_i64 fp0
= tcg_temp_new_i64();
9525 TCGv_i64 fp1
= tcg_temp_new_i64();
9527 gen_load_fpr64(ctx
, fp0
, fs
);
9528 gen_load_fpr64(ctx
, fp1
, ft
);
9529 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9530 tcg_temp_free_i64(fp1
);
9531 gen_store_fpr64(ctx
, fp0
, fd
);
9532 tcg_temp_free_i64(fp0
);
9536 check_cp1_registers(ctx
, fs
| ft
| fd
);
9538 TCGv_i64 fp0
= tcg_temp_new_i64();
9539 TCGv_i64 fp1
= tcg_temp_new_i64();
9541 gen_load_fpr64(ctx
, fp0
, fs
);
9542 gen_load_fpr64(ctx
, fp1
, ft
);
9543 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9544 tcg_temp_free_i64(fp1
);
9545 gen_store_fpr64(ctx
, fp0
, fd
);
9546 tcg_temp_free_i64(fp0
);
9550 check_cp1_registers(ctx
, fs
| ft
| fd
);
9552 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 TCGv_i64 fp1
= tcg_temp_new_i64();
9555 gen_load_fpr64(ctx
, fp0
, fs
);
9556 gen_load_fpr64(ctx
, fp1
, ft
);
9557 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9558 tcg_temp_free_i64(fp1
);
9559 gen_store_fpr64(ctx
, fp0
, fd
);
9560 tcg_temp_free_i64(fp0
);
9564 check_cp1_registers(ctx
, fs
| fd
);
9566 TCGv_i64 fp0
= tcg_temp_new_i64();
9568 gen_load_fpr64(ctx
, fp0
, fs
);
9569 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9570 gen_store_fpr64(ctx
, fp0
, fd
);
9571 tcg_temp_free_i64(fp0
);
9575 check_cp1_registers(ctx
, fs
| fd
);
9577 TCGv_i64 fp0
= tcg_temp_new_i64();
9579 gen_load_fpr64(ctx
, fp0
, fs
);
9581 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9583 gen_helper_float_abs_d(fp0
, fp0
);
9585 gen_store_fpr64(ctx
, fp0
, fd
);
9586 tcg_temp_free_i64(fp0
);
9590 check_cp1_registers(ctx
, fs
| fd
);
9592 TCGv_i64 fp0
= tcg_temp_new_i64();
9594 gen_load_fpr64(ctx
, fp0
, fs
);
9595 gen_store_fpr64(ctx
, fp0
, fd
);
9596 tcg_temp_free_i64(fp0
);
9600 check_cp1_registers(ctx
, fs
| fd
);
9602 TCGv_i64 fp0
= tcg_temp_new_i64();
9604 gen_load_fpr64(ctx
, fp0
, fs
);
9606 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9608 gen_helper_float_chs_d(fp0
, fp0
);
9610 gen_store_fpr64(ctx
, fp0
, fd
);
9611 tcg_temp_free_i64(fp0
);
9615 check_cp1_64bitmode(ctx
);
9617 TCGv_i64 fp0
= tcg_temp_new_i64();
9619 gen_load_fpr64(ctx
, fp0
, fs
);
9621 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9623 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9625 gen_store_fpr64(ctx
, fp0
, fd
);
9626 tcg_temp_free_i64(fp0
);
9630 check_cp1_64bitmode(ctx
);
9632 TCGv_i64 fp0
= tcg_temp_new_i64();
9634 gen_load_fpr64(ctx
, fp0
, fs
);
9636 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9638 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9640 gen_store_fpr64(ctx
, fp0
, fd
);
9641 tcg_temp_free_i64(fp0
);
9645 check_cp1_64bitmode(ctx
);
9647 TCGv_i64 fp0
= tcg_temp_new_i64();
9649 gen_load_fpr64(ctx
, fp0
, fs
);
9651 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9653 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9655 gen_store_fpr64(ctx
, fp0
, fd
);
9656 tcg_temp_free_i64(fp0
);
9660 check_cp1_64bitmode(ctx
);
9662 TCGv_i64 fp0
= tcg_temp_new_i64();
9664 gen_load_fpr64(ctx
, fp0
, fs
);
9666 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9668 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9670 gen_store_fpr64(ctx
, fp0
, fd
);
9671 tcg_temp_free_i64(fp0
);
9675 check_cp1_registers(ctx
, fs
);
9677 TCGv_i32 fp32
= tcg_temp_new_i32();
9678 TCGv_i64 fp64
= tcg_temp_new_i64();
9680 gen_load_fpr64(ctx
, fp64
, fs
);
9682 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9684 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9686 tcg_temp_free_i64(fp64
);
9687 gen_store_fpr32(ctx
, fp32
, fd
);
9688 tcg_temp_free_i32(fp32
);
9692 check_cp1_registers(ctx
, fs
);
9694 TCGv_i32 fp32
= tcg_temp_new_i32();
9695 TCGv_i64 fp64
= tcg_temp_new_i64();
9697 gen_load_fpr64(ctx
, fp64
, fs
);
9699 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9701 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9703 tcg_temp_free_i64(fp64
);
9704 gen_store_fpr32(ctx
, fp32
, fd
);
9705 tcg_temp_free_i32(fp32
);
9709 check_cp1_registers(ctx
, fs
);
9711 TCGv_i32 fp32
= tcg_temp_new_i32();
9712 TCGv_i64 fp64
= tcg_temp_new_i64();
9714 gen_load_fpr64(ctx
, fp64
, fs
);
9716 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9718 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9720 tcg_temp_free_i64(fp64
);
9721 gen_store_fpr32(ctx
, fp32
, fd
);
9722 tcg_temp_free_i32(fp32
);
9726 check_cp1_registers(ctx
, fs
);
9728 TCGv_i32 fp32
= tcg_temp_new_i32();
9729 TCGv_i64 fp64
= tcg_temp_new_i64();
9731 gen_load_fpr64(ctx
, fp64
, fs
);
9733 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9735 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9737 tcg_temp_free_i64(fp64
);
9738 gen_store_fpr32(ctx
, fp32
, fd
);
9739 tcg_temp_free_i32(fp32
);
9743 check_insn(ctx
, ISA_MIPS32R6
);
9744 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9747 check_insn(ctx
, ISA_MIPS32R6
);
9748 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9751 check_insn(ctx
, ISA_MIPS32R6
);
9752 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9755 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9756 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9759 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9761 TCGLabel
*l1
= gen_new_label();
9765 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9767 fp0
= tcg_temp_new_i64();
9768 gen_load_fpr64(ctx
, fp0
, fs
);
9769 gen_store_fpr64(ctx
, fp0
, fd
);
9770 tcg_temp_free_i64(fp0
);
9775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9777 TCGLabel
*l1
= gen_new_label();
9781 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9782 fp0
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_store_fpr64(ctx
, fp0
, fd
);
9785 tcg_temp_free_i64(fp0
);
9791 check_cp1_registers(ctx
, fs
| fd
);
9793 TCGv_i64 fp0
= tcg_temp_new_i64();
9795 gen_load_fpr64(ctx
, fp0
, fs
);
9796 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9797 gen_store_fpr64(ctx
, fp0
, fd
);
9798 tcg_temp_free_i64(fp0
);
9802 check_cp1_registers(ctx
, fs
| fd
);
9804 TCGv_i64 fp0
= tcg_temp_new_i64();
9806 gen_load_fpr64(ctx
, fp0
, fs
);
9807 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9808 gen_store_fpr64(ctx
, fp0
, fd
);
9809 tcg_temp_free_i64(fp0
);
9813 check_insn(ctx
, ISA_MIPS32R6
);
9815 TCGv_i64 fp0
= tcg_temp_new_i64();
9816 TCGv_i64 fp1
= tcg_temp_new_i64();
9817 TCGv_i64 fp2
= tcg_temp_new_i64();
9818 gen_load_fpr64(ctx
, fp0
, fs
);
9819 gen_load_fpr64(ctx
, fp1
, ft
);
9820 gen_load_fpr64(ctx
, fp2
, fd
);
9821 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9822 gen_store_fpr64(ctx
, fp2
, fd
);
9823 tcg_temp_free_i64(fp2
);
9824 tcg_temp_free_i64(fp1
);
9825 tcg_temp_free_i64(fp0
);
9829 check_insn(ctx
, ISA_MIPS32R6
);
9831 TCGv_i64 fp0
= tcg_temp_new_i64();
9832 TCGv_i64 fp1
= tcg_temp_new_i64();
9833 TCGv_i64 fp2
= tcg_temp_new_i64();
9834 gen_load_fpr64(ctx
, fp0
, fs
);
9835 gen_load_fpr64(ctx
, fp1
, ft
);
9836 gen_load_fpr64(ctx
, fp2
, fd
);
9837 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9838 gen_store_fpr64(ctx
, fp2
, fd
);
9839 tcg_temp_free_i64(fp2
);
9840 tcg_temp_free_i64(fp1
);
9841 tcg_temp_free_i64(fp0
);
9845 check_insn(ctx
, ISA_MIPS32R6
);
9847 TCGv_i64 fp0
= tcg_temp_new_i64();
9848 gen_load_fpr64(ctx
, fp0
, fs
);
9849 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9850 gen_store_fpr64(ctx
, fp0
, fd
);
9851 tcg_temp_free_i64(fp0
);
9855 check_insn(ctx
, ISA_MIPS32R6
);
9857 TCGv_i64 fp0
= tcg_temp_new_i64();
9858 gen_load_fpr64(ctx
, fp0
, fs
);
9859 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9860 gen_store_fpr64(ctx
, fp0
, fd
);
9861 tcg_temp_free_i64(fp0
);
9864 case OPC_MIN_D
: /* OPC_RECIP2_D */
9865 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9867 TCGv_i64 fp0
= tcg_temp_new_i64();
9868 TCGv_i64 fp1
= tcg_temp_new_i64();
9869 gen_load_fpr64(ctx
, fp0
, fs
);
9870 gen_load_fpr64(ctx
, fp1
, ft
);
9871 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9872 gen_store_fpr64(ctx
, fp1
, fd
);
9873 tcg_temp_free_i64(fp1
);
9874 tcg_temp_free_i64(fp0
);
9877 check_cp1_64bitmode(ctx
);
9879 TCGv_i64 fp0
= tcg_temp_new_i64();
9880 TCGv_i64 fp1
= tcg_temp_new_i64();
9882 gen_load_fpr64(ctx
, fp0
, fs
);
9883 gen_load_fpr64(ctx
, fp1
, ft
);
9884 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9885 tcg_temp_free_i64(fp1
);
9886 gen_store_fpr64(ctx
, fp0
, fd
);
9887 tcg_temp_free_i64(fp0
);
9891 case OPC_MINA_D
: /* OPC_RECIP1_D */
9892 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9894 TCGv_i64 fp0
= tcg_temp_new_i64();
9895 TCGv_i64 fp1
= tcg_temp_new_i64();
9896 gen_load_fpr64(ctx
, fp0
, fs
);
9897 gen_load_fpr64(ctx
, fp1
, ft
);
9898 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9899 gen_store_fpr64(ctx
, fp1
, fd
);
9900 tcg_temp_free_i64(fp1
);
9901 tcg_temp_free_i64(fp0
);
9904 check_cp1_64bitmode(ctx
);
9906 TCGv_i64 fp0
= tcg_temp_new_i64();
9908 gen_load_fpr64(ctx
, fp0
, fs
);
9909 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9910 gen_store_fpr64(ctx
, fp0
, fd
);
9911 tcg_temp_free_i64(fp0
);
9915 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9916 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9918 TCGv_i64 fp0
= tcg_temp_new_i64();
9919 TCGv_i64 fp1
= tcg_temp_new_i64();
9920 gen_load_fpr64(ctx
, fp0
, fs
);
9921 gen_load_fpr64(ctx
, fp1
, ft
);
9922 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9923 gen_store_fpr64(ctx
, fp1
, fd
);
9924 tcg_temp_free_i64(fp1
);
9925 tcg_temp_free_i64(fp0
);
9928 check_cp1_64bitmode(ctx
);
9930 TCGv_i64 fp0
= tcg_temp_new_i64();
9932 gen_load_fpr64(ctx
, fp0
, fs
);
9933 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9934 gen_store_fpr64(ctx
, fp0
, fd
);
9935 tcg_temp_free_i64(fp0
);
9939 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9940 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9942 TCGv_i64 fp0
= tcg_temp_new_i64();
9943 TCGv_i64 fp1
= tcg_temp_new_i64();
9944 gen_load_fpr64(ctx
, fp0
, fs
);
9945 gen_load_fpr64(ctx
, fp1
, ft
);
9946 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9947 gen_store_fpr64(ctx
, fp1
, fd
);
9948 tcg_temp_free_i64(fp1
);
9949 tcg_temp_free_i64(fp0
);
9952 check_cp1_64bitmode(ctx
);
9954 TCGv_i64 fp0
= tcg_temp_new_i64();
9955 TCGv_i64 fp1
= tcg_temp_new_i64();
9957 gen_load_fpr64(ctx
, fp0
, fs
);
9958 gen_load_fpr64(ctx
, fp1
, ft
);
9959 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9960 tcg_temp_free_i64(fp1
);
9961 gen_store_fpr64(ctx
, fp0
, fd
);
9962 tcg_temp_free_i64(fp0
);
9975 case OPC_CMP_NGLE_D
:
9982 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9983 if (ctx
->opcode
& (1 << 6)) {
9984 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9986 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9990 check_cp1_registers(ctx
, fs
);
9992 TCGv_i32 fp32
= tcg_temp_new_i32();
9993 TCGv_i64 fp64
= tcg_temp_new_i64();
9995 gen_load_fpr64(ctx
, fp64
, fs
);
9996 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9997 tcg_temp_free_i64(fp64
);
9998 gen_store_fpr32(ctx
, fp32
, fd
);
9999 tcg_temp_free_i32(fp32
);
10003 check_cp1_registers(ctx
, fs
);
10005 TCGv_i32 fp32
= tcg_temp_new_i32();
10006 TCGv_i64 fp64
= tcg_temp_new_i64();
10008 gen_load_fpr64(ctx
, fp64
, fs
);
10009 if (ctx
->nan2008
) {
10010 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10012 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10014 tcg_temp_free_i64(fp64
);
10015 gen_store_fpr32(ctx
, fp32
, fd
);
10016 tcg_temp_free_i32(fp32
);
10020 check_cp1_64bitmode(ctx
);
10022 TCGv_i64 fp0
= tcg_temp_new_i64();
10024 gen_load_fpr64(ctx
, fp0
, fs
);
10025 if (ctx
->nan2008
) {
10026 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10028 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10030 gen_store_fpr64(ctx
, fp0
, fd
);
10031 tcg_temp_free_i64(fp0
);
10036 TCGv_i32 fp0
= tcg_temp_new_i32();
10038 gen_load_fpr32(ctx
, fp0
, fs
);
10039 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10040 gen_store_fpr32(ctx
, fp0
, fd
);
10041 tcg_temp_free_i32(fp0
);
10045 check_cp1_registers(ctx
, fd
);
10047 TCGv_i32 fp32
= tcg_temp_new_i32();
10048 TCGv_i64 fp64
= tcg_temp_new_i64();
10050 gen_load_fpr32(ctx
, fp32
, fs
);
10051 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10052 tcg_temp_free_i32(fp32
);
10053 gen_store_fpr64(ctx
, fp64
, fd
);
10054 tcg_temp_free_i64(fp64
);
10058 check_cp1_64bitmode(ctx
);
10060 TCGv_i32 fp32
= tcg_temp_new_i32();
10061 TCGv_i64 fp64
= tcg_temp_new_i64();
10063 gen_load_fpr64(ctx
, fp64
, fs
);
10064 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10065 tcg_temp_free_i64(fp64
);
10066 gen_store_fpr32(ctx
, fp32
, fd
);
10067 tcg_temp_free_i32(fp32
);
10071 check_cp1_64bitmode(ctx
);
10073 TCGv_i64 fp0
= tcg_temp_new_i64();
10075 gen_load_fpr64(ctx
, fp0
, fs
);
10076 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10077 gen_store_fpr64(ctx
, fp0
, fd
);
10078 tcg_temp_free_i64(fp0
);
10081 case OPC_CVT_PS_PW
:
10084 TCGv_i64 fp0
= tcg_temp_new_i64();
10086 gen_load_fpr64(ctx
, fp0
, fs
);
10087 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10088 gen_store_fpr64(ctx
, fp0
, fd
);
10089 tcg_temp_free_i64(fp0
);
10095 TCGv_i64 fp0
= tcg_temp_new_i64();
10096 TCGv_i64 fp1
= tcg_temp_new_i64();
10098 gen_load_fpr64(ctx
, fp0
, fs
);
10099 gen_load_fpr64(ctx
, fp1
, ft
);
10100 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10101 tcg_temp_free_i64(fp1
);
10102 gen_store_fpr64(ctx
, fp0
, fd
);
10103 tcg_temp_free_i64(fp0
);
10109 TCGv_i64 fp0
= tcg_temp_new_i64();
10110 TCGv_i64 fp1
= tcg_temp_new_i64();
10112 gen_load_fpr64(ctx
, fp0
, fs
);
10113 gen_load_fpr64(ctx
, fp1
, ft
);
10114 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10115 tcg_temp_free_i64(fp1
);
10116 gen_store_fpr64(ctx
, fp0
, fd
);
10117 tcg_temp_free_i64(fp0
);
10123 TCGv_i64 fp0
= tcg_temp_new_i64();
10124 TCGv_i64 fp1
= tcg_temp_new_i64();
10126 gen_load_fpr64(ctx
, fp0
, fs
);
10127 gen_load_fpr64(ctx
, fp1
, ft
);
10128 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10129 tcg_temp_free_i64(fp1
);
10130 gen_store_fpr64(ctx
, fp0
, fd
);
10131 tcg_temp_free_i64(fp0
);
10137 TCGv_i64 fp0
= tcg_temp_new_i64();
10139 gen_load_fpr64(ctx
, fp0
, fs
);
10140 gen_helper_float_abs_ps(fp0
, fp0
);
10141 gen_store_fpr64(ctx
, fp0
, fd
);
10142 tcg_temp_free_i64(fp0
);
10148 TCGv_i64 fp0
= tcg_temp_new_i64();
10150 gen_load_fpr64(ctx
, fp0
, fs
);
10151 gen_store_fpr64(ctx
, fp0
, fd
);
10152 tcg_temp_free_i64(fp0
);
10158 TCGv_i64 fp0
= tcg_temp_new_i64();
10160 gen_load_fpr64(ctx
, fp0
, fs
);
10161 gen_helper_float_chs_ps(fp0
, fp0
);
10162 gen_store_fpr64(ctx
, fp0
, fd
);
10163 tcg_temp_free_i64(fp0
);
10168 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10173 TCGLabel
*l1
= gen_new_label();
10177 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10178 fp0
= tcg_temp_new_i64();
10179 gen_load_fpr64(ctx
, fp0
, fs
);
10180 gen_store_fpr64(ctx
, fp0
, fd
);
10181 tcg_temp_free_i64(fp0
);
10188 TCGLabel
*l1
= gen_new_label();
10192 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10193 fp0
= tcg_temp_new_i64();
10194 gen_load_fpr64(ctx
, fp0
, fs
);
10195 gen_store_fpr64(ctx
, fp0
, fd
);
10196 tcg_temp_free_i64(fp0
);
10204 TCGv_i64 fp0
= tcg_temp_new_i64();
10205 TCGv_i64 fp1
= tcg_temp_new_i64();
10207 gen_load_fpr64(ctx
, fp0
, ft
);
10208 gen_load_fpr64(ctx
, fp1
, fs
);
10209 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10210 tcg_temp_free_i64(fp1
);
10211 gen_store_fpr64(ctx
, fp0
, fd
);
10212 tcg_temp_free_i64(fp0
);
10218 TCGv_i64 fp0
= tcg_temp_new_i64();
10219 TCGv_i64 fp1
= tcg_temp_new_i64();
10221 gen_load_fpr64(ctx
, fp0
, ft
);
10222 gen_load_fpr64(ctx
, fp1
, fs
);
10223 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10224 tcg_temp_free_i64(fp1
);
10225 gen_store_fpr64(ctx
, fp0
, fd
);
10226 tcg_temp_free_i64(fp0
);
10229 case OPC_RECIP2_PS
:
10232 TCGv_i64 fp0
= tcg_temp_new_i64();
10233 TCGv_i64 fp1
= tcg_temp_new_i64();
10235 gen_load_fpr64(ctx
, fp0
, fs
);
10236 gen_load_fpr64(ctx
, fp1
, ft
);
10237 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10238 tcg_temp_free_i64(fp1
);
10239 gen_store_fpr64(ctx
, fp0
, fd
);
10240 tcg_temp_free_i64(fp0
);
10243 case OPC_RECIP1_PS
:
10246 TCGv_i64 fp0
= tcg_temp_new_i64();
10248 gen_load_fpr64(ctx
, fp0
, fs
);
10249 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10250 gen_store_fpr64(ctx
, fp0
, fd
);
10251 tcg_temp_free_i64(fp0
);
10254 case OPC_RSQRT1_PS
:
10257 TCGv_i64 fp0
= tcg_temp_new_i64();
10259 gen_load_fpr64(ctx
, fp0
, fs
);
10260 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10261 gen_store_fpr64(ctx
, fp0
, fd
);
10262 tcg_temp_free_i64(fp0
);
10265 case OPC_RSQRT2_PS
:
10268 TCGv_i64 fp0
= tcg_temp_new_i64();
10269 TCGv_i64 fp1
= tcg_temp_new_i64();
10271 gen_load_fpr64(ctx
, fp0
, fs
);
10272 gen_load_fpr64(ctx
, fp1
, ft
);
10273 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10274 tcg_temp_free_i64(fp1
);
10275 gen_store_fpr64(ctx
, fp0
, fd
);
10276 tcg_temp_free_i64(fp0
);
10280 check_cp1_64bitmode(ctx
);
10282 TCGv_i32 fp0
= tcg_temp_new_i32();
10284 gen_load_fpr32h(ctx
, fp0
, fs
);
10285 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10286 gen_store_fpr32(ctx
, fp0
, fd
);
10287 tcg_temp_free_i32(fp0
);
10290 case OPC_CVT_PW_PS
:
10293 TCGv_i64 fp0
= tcg_temp_new_i64();
10295 gen_load_fpr64(ctx
, fp0
, fs
);
10296 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10297 gen_store_fpr64(ctx
, fp0
, fd
);
10298 tcg_temp_free_i64(fp0
);
10302 check_cp1_64bitmode(ctx
);
10304 TCGv_i32 fp0
= tcg_temp_new_i32();
10306 gen_load_fpr32(ctx
, fp0
, fs
);
10307 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10308 gen_store_fpr32(ctx
, fp0
, fd
);
10309 tcg_temp_free_i32(fp0
);
10315 TCGv_i32 fp0
= tcg_temp_new_i32();
10316 TCGv_i32 fp1
= tcg_temp_new_i32();
10318 gen_load_fpr32(ctx
, fp0
, fs
);
10319 gen_load_fpr32(ctx
, fp1
, ft
);
10320 gen_store_fpr32h(ctx
, fp0
, fd
);
10321 gen_store_fpr32(ctx
, fp1
, fd
);
10322 tcg_temp_free_i32(fp0
);
10323 tcg_temp_free_i32(fp1
);
10329 TCGv_i32 fp0
= tcg_temp_new_i32();
10330 TCGv_i32 fp1
= tcg_temp_new_i32();
10332 gen_load_fpr32(ctx
, fp0
, fs
);
10333 gen_load_fpr32h(ctx
, fp1
, ft
);
10334 gen_store_fpr32(ctx
, fp1
, fd
);
10335 gen_store_fpr32h(ctx
, fp0
, fd
);
10336 tcg_temp_free_i32(fp0
);
10337 tcg_temp_free_i32(fp1
);
10343 TCGv_i32 fp0
= tcg_temp_new_i32();
10344 TCGv_i32 fp1
= tcg_temp_new_i32();
10346 gen_load_fpr32h(ctx
, fp0
, fs
);
10347 gen_load_fpr32(ctx
, fp1
, ft
);
10348 gen_store_fpr32(ctx
, fp1
, fd
);
10349 gen_store_fpr32h(ctx
, fp0
, fd
);
10350 tcg_temp_free_i32(fp0
);
10351 tcg_temp_free_i32(fp1
);
10357 TCGv_i32 fp0
= tcg_temp_new_i32();
10358 TCGv_i32 fp1
= tcg_temp_new_i32();
10360 gen_load_fpr32h(ctx
, fp0
, fs
);
10361 gen_load_fpr32h(ctx
, fp1
, ft
);
10362 gen_store_fpr32(ctx
, fp1
, fd
);
10363 gen_store_fpr32h(ctx
, fp0
, fd
);
10364 tcg_temp_free_i32(fp0
);
10365 tcg_temp_free_i32(fp1
);
10369 case OPC_CMP_UN_PS
:
10370 case OPC_CMP_EQ_PS
:
10371 case OPC_CMP_UEQ_PS
:
10372 case OPC_CMP_OLT_PS
:
10373 case OPC_CMP_ULT_PS
:
10374 case OPC_CMP_OLE_PS
:
10375 case OPC_CMP_ULE_PS
:
10376 case OPC_CMP_SF_PS
:
10377 case OPC_CMP_NGLE_PS
:
10378 case OPC_CMP_SEQ_PS
:
10379 case OPC_CMP_NGL_PS
:
10380 case OPC_CMP_LT_PS
:
10381 case OPC_CMP_NGE_PS
:
10382 case OPC_CMP_LE_PS
:
10383 case OPC_CMP_NGT_PS
:
10384 if (ctx
->opcode
& (1 << 6)) {
10385 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10387 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10391 MIPS_INVAL("farith");
10392 generate_exception_end(ctx
, EXCP_RI
);
10397 /* Coprocessor 3 (FPU) */
10398 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10399 int fd
, int fs
, int base
, int index
)
10401 TCGv t0
= tcg_temp_new();
10404 gen_load_gpr(t0
, index
);
10405 } else if (index
== 0) {
10406 gen_load_gpr(t0
, base
);
10408 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10410 /* Don't do NOP if destination is zero: we must perform the actual
10416 TCGv_i32 fp0
= tcg_temp_new_i32();
10418 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10419 tcg_gen_trunc_tl_i32(fp0
, t0
);
10420 gen_store_fpr32(ctx
, fp0
, fd
);
10421 tcg_temp_free_i32(fp0
);
10426 check_cp1_registers(ctx
, fd
);
10428 TCGv_i64 fp0
= tcg_temp_new_i64();
10429 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10430 gen_store_fpr64(ctx
, fp0
, fd
);
10431 tcg_temp_free_i64(fp0
);
10435 check_cp1_64bitmode(ctx
);
10436 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10438 TCGv_i64 fp0
= tcg_temp_new_i64();
10440 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10441 gen_store_fpr64(ctx
, fp0
, fd
);
10442 tcg_temp_free_i64(fp0
);
10448 TCGv_i32 fp0
= tcg_temp_new_i32();
10449 gen_load_fpr32(ctx
, fp0
, fs
);
10450 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10451 tcg_temp_free_i32(fp0
);
10456 check_cp1_registers(ctx
, fs
);
10458 TCGv_i64 fp0
= tcg_temp_new_i64();
10459 gen_load_fpr64(ctx
, fp0
, fs
);
10460 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10461 tcg_temp_free_i64(fp0
);
10465 check_cp1_64bitmode(ctx
);
10466 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10468 TCGv_i64 fp0
= tcg_temp_new_i64();
10469 gen_load_fpr64(ctx
, fp0
, fs
);
10470 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10471 tcg_temp_free_i64(fp0
);
10478 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10479 int fd
, int fr
, int fs
, int ft
)
10485 TCGv t0
= tcg_temp_local_new();
10486 TCGv_i32 fp
= tcg_temp_new_i32();
10487 TCGv_i32 fph
= tcg_temp_new_i32();
10488 TCGLabel
*l1
= gen_new_label();
10489 TCGLabel
*l2
= gen_new_label();
10491 gen_load_gpr(t0
, fr
);
10492 tcg_gen_andi_tl(t0
, t0
, 0x7);
10494 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10495 gen_load_fpr32(ctx
, fp
, fs
);
10496 gen_load_fpr32h(ctx
, fph
, fs
);
10497 gen_store_fpr32(ctx
, fp
, fd
);
10498 gen_store_fpr32h(ctx
, fph
, fd
);
10501 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10503 #ifdef TARGET_WORDS_BIGENDIAN
10504 gen_load_fpr32(ctx
, fp
, fs
);
10505 gen_load_fpr32h(ctx
, fph
, ft
);
10506 gen_store_fpr32h(ctx
, fp
, fd
);
10507 gen_store_fpr32(ctx
, fph
, fd
);
10509 gen_load_fpr32h(ctx
, fph
, fs
);
10510 gen_load_fpr32(ctx
, fp
, ft
);
10511 gen_store_fpr32(ctx
, fph
, fd
);
10512 gen_store_fpr32h(ctx
, fp
, fd
);
10515 tcg_temp_free_i32(fp
);
10516 tcg_temp_free_i32(fph
);
10522 TCGv_i32 fp0
= tcg_temp_new_i32();
10523 TCGv_i32 fp1
= tcg_temp_new_i32();
10524 TCGv_i32 fp2
= tcg_temp_new_i32();
10526 gen_load_fpr32(ctx
, fp0
, fs
);
10527 gen_load_fpr32(ctx
, fp1
, ft
);
10528 gen_load_fpr32(ctx
, fp2
, fr
);
10529 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10530 tcg_temp_free_i32(fp0
);
10531 tcg_temp_free_i32(fp1
);
10532 gen_store_fpr32(ctx
, fp2
, fd
);
10533 tcg_temp_free_i32(fp2
);
10538 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10540 TCGv_i64 fp0
= tcg_temp_new_i64();
10541 TCGv_i64 fp1
= tcg_temp_new_i64();
10542 TCGv_i64 fp2
= tcg_temp_new_i64();
10544 gen_load_fpr64(ctx
, fp0
, fs
);
10545 gen_load_fpr64(ctx
, fp1
, ft
);
10546 gen_load_fpr64(ctx
, fp2
, fr
);
10547 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10548 tcg_temp_free_i64(fp0
);
10549 tcg_temp_free_i64(fp1
);
10550 gen_store_fpr64(ctx
, fp2
, fd
);
10551 tcg_temp_free_i64(fp2
);
10557 TCGv_i64 fp0
= tcg_temp_new_i64();
10558 TCGv_i64 fp1
= tcg_temp_new_i64();
10559 TCGv_i64 fp2
= tcg_temp_new_i64();
10561 gen_load_fpr64(ctx
, fp0
, fs
);
10562 gen_load_fpr64(ctx
, fp1
, ft
);
10563 gen_load_fpr64(ctx
, fp2
, fr
);
10564 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10565 tcg_temp_free_i64(fp0
);
10566 tcg_temp_free_i64(fp1
);
10567 gen_store_fpr64(ctx
, fp2
, fd
);
10568 tcg_temp_free_i64(fp2
);
10574 TCGv_i32 fp0
= tcg_temp_new_i32();
10575 TCGv_i32 fp1
= tcg_temp_new_i32();
10576 TCGv_i32 fp2
= tcg_temp_new_i32();
10578 gen_load_fpr32(ctx
, fp0
, fs
);
10579 gen_load_fpr32(ctx
, fp1
, ft
);
10580 gen_load_fpr32(ctx
, fp2
, fr
);
10581 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10582 tcg_temp_free_i32(fp0
);
10583 tcg_temp_free_i32(fp1
);
10584 gen_store_fpr32(ctx
, fp2
, fd
);
10585 tcg_temp_free_i32(fp2
);
10590 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10592 TCGv_i64 fp0
= tcg_temp_new_i64();
10593 TCGv_i64 fp1
= tcg_temp_new_i64();
10594 TCGv_i64 fp2
= tcg_temp_new_i64();
10596 gen_load_fpr64(ctx
, fp0
, fs
);
10597 gen_load_fpr64(ctx
, fp1
, ft
);
10598 gen_load_fpr64(ctx
, fp2
, fr
);
10599 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10600 tcg_temp_free_i64(fp0
);
10601 tcg_temp_free_i64(fp1
);
10602 gen_store_fpr64(ctx
, fp2
, fd
);
10603 tcg_temp_free_i64(fp2
);
10609 TCGv_i64 fp0
= tcg_temp_new_i64();
10610 TCGv_i64 fp1
= tcg_temp_new_i64();
10611 TCGv_i64 fp2
= tcg_temp_new_i64();
10613 gen_load_fpr64(ctx
, fp0
, fs
);
10614 gen_load_fpr64(ctx
, fp1
, ft
);
10615 gen_load_fpr64(ctx
, fp2
, fr
);
10616 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10617 tcg_temp_free_i64(fp0
);
10618 tcg_temp_free_i64(fp1
);
10619 gen_store_fpr64(ctx
, fp2
, fd
);
10620 tcg_temp_free_i64(fp2
);
10626 TCGv_i32 fp0
= tcg_temp_new_i32();
10627 TCGv_i32 fp1
= tcg_temp_new_i32();
10628 TCGv_i32 fp2
= tcg_temp_new_i32();
10630 gen_load_fpr32(ctx
, fp0
, fs
);
10631 gen_load_fpr32(ctx
, fp1
, ft
);
10632 gen_load_fpr32(ctx
, fp2
, fr
);
10633 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10634 tcg_temp_free_i32(fp0
);
10635 tcg_temp_free_i32(fp1
);
10636 gen_store_fpr32(ctx
, fp2
, fd
);
10637 tcg_temp_free_i32(fp2
);
10642 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10644 TCGv_i64 fp0
= tcg_temp_new_i64();
10645 TCGv_i64 fp1
= tcg_temp_new_i64();
10646 TCGv_i64 fp2
= tcg_temp_new_i64();
10648 gen_load_fpr64(ctx
, fp0
, fs
);
10649 gen_load_fpr64(ctx
, fp1
, ft
);
10650 gen_load_fpr64(ctx
, fp2
, fr
);
10651 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10652 tcg_temp_free_i64(fp0
);
10653 tcg_temp_free_i64(fp1
);
10654 gen_store_fpr64(ctx
, fp2
, fd
);
10655 tcg_temp_free_i64(fp2
);
10661 TCGv_i64 fp0
= tcg_temp_new_i64();
10662 TCGv_i64 fp1
= tcg_temp_new_i64();
10663 TCGv_i64 fp2
= tcg_temp_new_i64();
10665 gen_load_fpr64(ctx
, fp0
, fs
);
10666 gen_load_fpr64(ctx
, fp1
, ft
);
10667 gen_load_fpr64(ctx
, fp2
, fr
);
10668 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10669 tcg_temp_free_i64(fp0
);
10670 tcg_temp_free_i64(fp1
);
10671 gen_store_fpr64(ctx
, fp2
, fd
);
10672 tcg_temp_free_i64(fp2
);
10678 TCGv_i32 fp0
= tcg_temp_new_i32();
10679 TCGv_i32 fp1
= tcg_temp_new_i32();
10680 TCGv_i32 fp2
= tcg_temp_new_i32();
10682 gen_load_fpr32(ctx
, fp0
, fs
);
10683 gen_load_fpr32(ctx
, fp1
, ft
);
10684 gen_load_fpr32(ctx
, fp2
, fr
);
10685 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10686 tcg_temp_free_i32(fp0
);
10687 tcg_temp_free_i32(fp1
);
10688 gen_store_fpr32(ctx
, fp2
, fd
);
10689 tcg_temp_free_i32(fp2
);
10694 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10696 TCGv_i64 fp0
= tcg_temp_new_i64();
10697 TCGv_i64 fp1
= tcg_temp_new_i64();
10698 TCGv_i64 fp2
= tcg_temp_new_i64();
10700 gen_load_fpr64(ctx
, fp0
, fs
);
10701 gen_load_fpr64(ctx
, fp1
, ft
);
10702 gen_load_fpr64(ctx
, fp2
, fr
);
10703 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10704 tcg_temp_free_i64(fp0
);
10705 tcg_temp_free_i64(fp1
);
10706 gen_store_fpr64(ctx
, fp2
, fd
);
10707 tcg_temp_free_i64(fp2
);
10713 TCGv_i64 fp0
= tcg_temp_new_i64();
10714 TCGv_i64 fp1
= tcg_temp_new_i64();
10715 TCGv_i64 fp2
= tcg_temp_new_i64();
10717 gen_load_fpr64(ctx
, fp0
, fs
);
10718 gen_load_fpr64(ctx
, fp1
, ft
);
10719 gen_load_fpr64(ctx
, fp2
, fr
);
10720 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10721 tcg_temp_free_i64(fp0
);
10722 tcg_temp_free_i64(fp1
);
10723 gen_store_fpr64(ctx
, fp2
, fd
);
10724 tcg_temp_free_i64(fp2
);
10728 MIPS_INVAL("flt3_arith");
10729 generate_exception_end(ctx
, EXCP_RI
);
10734 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10738 #if !defined(CONFIG_USER_ONLY)
10739 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10740 Therefore only check the ISA in system mode. */
10741 check_insn(ctx
, ISA_MIPS32R2
);
10743 t0
= tcg_temp_new();
10747 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10748 gen_store_gpr(t0
, rt
);
10751 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10752 gen_store_gpr(t0
, rt
);
10755 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10758 gen_helper_rdhwr_cc(t0
, cpu_env
);
10759 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
10762 gen_store_gpr(t0
, rt
);
10763 /* Break the TB to be able to take timer interrupts immediately
10764 after reading count. DISAS_STOP isn't sufficient, we need to ensure
10765 we break completely out of translated code. */
10766 gen_save_pc(ctx
->base
.pc_next
+ 4);
10767 ctx
->base
.is_jmp
= DISAS_EXIT
;
10770 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10771 gen_store_gpr(t0
, rt
);
10774 check_insn(ctx
, ISA_MIPS32R6
);
10776 /* Performance counter registers are not implemented other than
10777 * control register 0.
10779 generate_exception(ctx
, EXCP_RI
);
10781 gen_helper_rdhwr_performance(t0
, cpu_env
);
10782 gen_store_gpr(t0
, rt
);
10785 check_insn(ctx
, ISA_MIPS32R6
);
10786 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10787 gen_store_gpr(t0
, rt
);
10790 #if defined(CONFIG_USER_ONLY)
10791 tcg_gen_ld_tl(t0
, cpu_env
,
10792 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10793 gen_store_gpr(t0
, rt
);
10796 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10797 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10798 tcg_gen_ld_tl(t0
, cpu_env
,
10799 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10800 gen_store_gpr(t0
, rt
);
10802 generate_exception_end(ctx
, EXCP_RI
);
10806 default: /* Invalid */
10807 MIPS_INVAL("rdhwr");
10808 generate_exception_end(ctx
, EXCP_RI
);
10814 static inline void clear_branch_hflags(DisasContext
*ctx
)
10816 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10817 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
10818 save_cpu_state(ctx
, 0);
10820 /* it is not safe to save ctx->hflags as hflags may be changed
10821 in execution time by the instruction in delay / forbidden slot. */
10822 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10826 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10828 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10829 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10830 /* Branches completion */
10831 clear_branch_hflags(ctx
);
10832 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10833 /* FIXME: Need to clear can_do_io. */
10834 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10835 case MIPS_HFLAG_FBNSLOT
:
10836 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
10839 /* unconditional branch */
10840 if (proc_hflags
& MIPS_HFLAG_BX
) {
10841 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10843 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10845 case MIPS_HFLAG_BL
:
10846 /* blikely taken case */
10847 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10849 case MIPS_HFLAG_BC
:
10850 /* Conditional branch */
10852 TCGLabel
*l1
= gen_new_label();
10854 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10855 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
10857 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10860 case MIPS_HFLAG_BR
:
10861 /* unconditional branch to register */
10862 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10863 TCGv t0
= tcg_temp_new();
10864 TCGv_i32 t1
= tcg_temp_new_i32();
10866 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10867 tcg_gen_trunc_tl_i32(t1
, t0
);
10869 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10870 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10871 tcg_gen_or_i32(hflags
, hflags
, t1
);
10872 tcg_temp_free_i32(t1
);
10874 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10876 tcg_gen_mov_tl(cpu_PC
, btarget
);
10878 if (ctx
->base
.singlestep_enabled
) {
10879 save_cpu_state(ctx
, 0);
10880 gen_helper_raise_exception_debug(cpu_env
);
10882 tcg_gen_lookup_and_goto_ptr();
10885 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10891 /* Compact Branches */
10892 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10893 int rs
, int rt
, int32_t offset
)
10895 int bcond_compute
= 0;
10896 TCGv t0
= tcg_temp_new();
10897 TCGv t1
= tcg_temp_new();
10898 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10900 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10901 #ifdef MIPS_DEBUG_DISAS
10902 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10903 "\n", ctx
->base
.pc_next
);
10905 generate_exception_end(ctx
, EXCP_RI
);
10909 /* Load needed operands and calculate btarget */
10911 /* compact branch */
10912 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10913 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10914 gen_load_gpr(t0
, rs
);
10915 gen_load_gpr(t1
, rt
);
10917 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10918 if (rs
<= rt
&& rs
== 0) {
10919 /* OPC_BEQZALC, OPC_BNEZALC */
10920 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10923 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10924 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10925 gen_load_gpr(t0
, rs
);
10926 gen_load_gpr(t1
, rt
);
10928 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10930 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10931 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10932 if (rs
== 0 || rs
== rt
) {
10933 /* OPC_BLEZALC, OPC_BGEZALC */
10934 /* OPC_BGTZALC, OPC_BLTZALC */
10935 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10937 gen_load_gpr(t0
, rs
);
10938 gen_load_gpr(t1
, rt
);
10940 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10944 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10949 /* OPC_BEQZC, OPC_BNEZC */
10950 gen_load_gpr(t0
, rs
);
10952 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10954 /* OPC_JIC, OPC_JIALC */
10955 TCGv tbase
= tcg_temp_new();
10956 TCGv toffset
= tcg_temp_new();
10958 gen_load_gpr(tbase
, rt
);
10959 tcg_gen_movi_tl(toffset
, offset
);
10960 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10961 tcg_temp_free(tbase
);
10962 tcg_temp_free(toffset
);
10966 MIPS_INVAL("Compact branch/jump");
10967 generate_exception_end(ctx
, EXCP_RI
);
10971 if (bcond_compute
== 0) {
10972 /* Uncoditional compact branch */
10975 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10978 ctx
->hflags
|= MIPS_HFLAG_BR
;
10981 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
10984 ctx
->hflags
|= MIPS_HFLAG_B
;
10987 MIPS_INVAL("Compact branch/jump");
10988 generate_exception_end(ctx
, EXCP_RI
);
10992 /* Generating branch here as compact branches don't have delay slot */
10993 gen_branch(ctx
, 4);
10995 /* Conditional compact branch */
10996 TCGLabel
*fs
= gen_new_label();
10997 save_cpu_state(ctx
, 0);
11000 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11001 if (rs
== 0 && rt
!= 0) {
11003 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11004 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11006 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11009 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11012 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11013 if (rs
== 0 && rt
!= 0) {
11015 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11016 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11018 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11021 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11024 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11025 if (rs
== 0 && rt
!= 0) {
11027 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11028 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11030 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11033 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11036 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11037 if (rs
== 0 && rt
!= 0) {
11039 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11040 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11042 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11045 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11048 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11049 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11051 /* OPC_BOVC, OPC_BNVC */
11052 TCGv t2
= tcg_temp_new();
11053 TCGv t3
= tcg_temp_new();
11054 TCGv t4
= tcg_temp_new();
11055 TCGv input_overflow
= tcg_temp_new();
11057 gen_load_gpr(t0
, rs
);
11058 gen_load_gpr(t1
, rt
);
11059 tcg_gen_ext32s_tl(t2
, t0
);
11060 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11061 tcg_gen_ext32s_tl(t3
, t1
);
11062 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11063 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11065 tcg_gen_add_tl(t4
, t2
, t3
);
11066 tcg_gen_ext32s_tl(t4
, t4
);
11067 tcg_gen_xor_tl(t2
, t2
, t3
);
11068 tcg_gen_xor_tl(t3
, t4
, t3
);
11069 tcg_gen_andc_tl(t2
, t3
, t2
);
11070 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11071 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11072 if (opc
== OPC_BOVC
) {
11074 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11077 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11079 tcg_temp_free(input_overflow
);
11083 } else if (rs
< rt
&& rs
== 0) {
11084 /* OPC_BEQZALC, OPC_BNEZALC */
11085 if (opc
== OPC_BEQZALC
) {
11087 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11090 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11093 /* OPC_BEQC, OPC_BNEC */
11094 if (opc
== OPC_BEQC
) {
11096 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
11099 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
11104 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
11107 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
11110 MIPS_INVAL("Compact conditional branch/jump");
11111 generate_exception_end(ctx
, EXCP_RI
);
11115 /* Generating branch here as compact branches don't have delay slot */
11116 gen_goto_tb(ctx
, 1, ctx
->btarget
);
11119 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
11127 /* ISA extensions (ASEs) */
11128 /* MIPS16 extension to MIPS32 */
11130 /* MIPS16 major opcodes */
11132 M16_OPC_ADDIUSP
= 0x00,
11133 M16_OPC_ADDIUPC
= 0x01,
11135 M16_OPC_JAL
= 0x03,
11136 M16_OPC_BEQZ
= 0x04,
11137 M16_OPC_BNEQZ
= 0x05,
11138 M16_OPC_SHIFT
= 0x06,
11140 M16_OPC_RRIA
= 0x08,
11141 M16_OPC_ADDIU8
= 0x09,
11142 M16_OPC_SLTI
= 0x0a,
11143 M16_OPC_SLTIU
= 0x0b,
11146 M16_OPC_CMPI
= 0x0e,
11150 M16_OPC_LWSP
= 0x12,
11152 M16_OPC_LBU
= 0x14,
11153 M16_OPC_LHU
= 0x15,
11154 M16_OPC_LWPC
= 0x16,
11155 M16_OPC_LWU
= 0x17,
11158 M16_OPC_SWSP
= 0x1a,
11160 M16_OPC_RRR
= 0x1c,
11162 M16_OPC_EXTEND
= 0x1e,
11166 /* I8 funct field */
11185 /* RR funct field */
11219 /* I64 funct field */
11227 I64_DADDIUPC
= 0x6,
11231 /* RR ry field for CNVT */
11233 RR_RY_CNVT_ZEB
= 0x0,
11234 RR_RY_CNVT_ZEH
= 0x1,
11235 RR_RY_CNVT_ZEW
= 0x2,
11236 RR_RY_CNVT_SEB
= 0x4,
11237 RR_RY_CNVT_SEH
= 0x5,
11238 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field into the corresponding full
 * 5-bit GPR number: encodings 0 and 1 name $16/$17 (s0/s1), encodings
 * 2..7 name $2..$7 (v0/v1, a0..a3).
 */
static int xlat(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
11248 static void gen_mips16_save (DisasContext
*ctx
,
11249 int xsregs
, int aregs
,
11250 int do_ra
, int do_s0
, int do_s1
,
11253 TCGv t0
= tcg_temp_new();
11254 TCGv t1
= tcg_temp_new();
11255 TCGv t2
= tcg_temp_new();
11285 generate_exception_end(ctx
, EXCP_RI
);
11291 gen_base_offset_addr(ctx
, t0
, 29, 12);
11292 gen_load_gpr(t1
, 7);
11293 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11296 gen_base_offset_addr(ctx
, t0
, 29, 8);
11297 gen_load_gpr(t1
, 6);
11298 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11301 gen_base_offset_addr(ctx
, t0
, 29, 4);
11302 gen_load_gpr(t1
, 5);
11303 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11306 gen_base_offset_addr(ctx
, t0
, 29, 0);
11307 gen_load_gpr(t1
, 4);
11308 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11311 gen_load_gpr(t0
, 29);
11313 #define DECR_AND_STORE(reg) do { \
11314 tcg_gen_movi_tl(t2, -4); \
11315 gen_op_addr_add(ctx, t0, t0, t2); \
11316 gen_load_gpr(t1, reg); \
11317 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11321 DECR_AND_STORE(31);
11326 DECR_AND_STORE(30);
11329 DECR_AND_STORE(23);
11332 DECR_AND_STORE(22);
11335 DECR_AND_STORE(21);
11338 DECR_AND_STORE(20);
11341 DECR_AND_STORE(19);
11344 DECR_AND_STORE(18);
11348 DECR_AND_STORE(17);
11351 DECR_AND_STORE(16);
11381 generate_exception_end(ctx
, EXCP_RI
);
11397 #undef DECR_AND_STORE
11399 tcg_gen_movi_tl(t2
, -framesize
);
11400 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11406 static void gen_mips16_restore (DisasContext
*ctx
,
11407 int xsregs
, int aregs
,
11408 int do_ra
, int do_s0
, int do_s1
,
11412 TCGv t0
= tcg_temp_new();
11413 TCGv t1
= tcg_temp_new();
11414 TCGv t2
= tcg_temp_new();
11416 tcg_gen_movi_tl(t2
, framesize
);
11417 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11419 #define DECR_AND_LOAD(reg) do { \
11420 tcg_gen_movi_tl(t2, -4); \
11421 gen_op_addr_add(ctx, t0, t0, t2); \
11422 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11423 gen_store_gpr(t1, reg); \
11487 generate_exception_end(ctx
, EXCP_RI
);
11503 #undef DECR_AND_LOAD
11505 tcg_gen_movi_tl(t2
, framesize
);
11506 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11512 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11513 int is_64_bit
, int extended
)
11517 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11518 generate_exception_end(ctx
, EXCP_RI
);
11522 t0
= tcg_temp_new();
11524 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11525 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11527 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11533 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11536 TCGv_i32 t0
= tcg_const_i32(op
);
11537 TCGv t1
= tcg_temp_new();
11538 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11539 gen_helper_cache(cpu_env
, t1
, t0
);
11542 #if defined(TARGET_MIPS64)
11543 static void decode_i64_mips16 (DisasContext
*ctx
,
11544 int ry
, int funct
, int16_t offset
,
11549 check_insn(ctx
, ISA_MIPS3
);
11550 check_mips_64(ctx
);
11551 offset
= extended
? offset
: offset
<< 3;
11552 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11555 check_insn(ctx
, ISA_MIPS3
);
11556 check_mips_64(ctx
);
11557 offset
= extended
? offset
: offset
<< 3;
11558 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11561 check_insn(ctx
, ISA_MIPS3
);
11562 check_mips_64(ctx
);
11563 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11564 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11567 check_insn(ctx
, ISA_MIPS3
);
11568 check_mips_64(ctx
);
11569 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11570 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11573 check_insn(ctx
, ISA_MIPS3
);
11574 check_mips_64(ctx
);
11575 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11576 generate_exception_end(ctx
, EXCP_RI
);
11578 offset
= extended
? offset
: offset
<< 3;
11579 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11583 check_insn(ctx
, ISA_MIPS3
);
11584 check_mips_64(ctx
);
11585 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11586 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11589 check_insn(ctx
, ISA_MIPS3
);
11590 check_mips_64(ctx
);
11591 offset
= extended
? offset
: offset
<< 2;
11592 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11595 check_insn(ctx
, ISA_MIPS3
);
11596 check_mips_64(ctx
);
11597 offset
= extended
? offset
: offset
<< 2;
11598 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11604 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11606 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11607 int op
, rx
, ry
, funct
, sa
;
11608 int16_t imm
, offset
;
11610 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11611 op
= (ctx
->opcode
>> 11) & 0x1f;
11612 sa
= (ctx
->opcode
>> 22) & 0x1f;
11613 funct
= (ctx
->opcode
>> 8) & 0x7;
11614 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11615 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11616 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11617 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11618 | (ctx
->opcode
& 0x1f));
11620 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11623 case M16_OPC_ADDIUSP
:
11624 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11626 case M16_OPC_ADDIUPC
:
11627 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11630 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11631 /* No delay slot, so just process as a normal instruction */
11634 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11635 /* No delay slot, so just process as a normal instruction */
11637 case M16_OPC_BNEQZ
:
11638 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11639 /* No delay slot, so just process as a normal instruction */
11641 case M16_OPC_SHIFT
:
11642 switch (ctx
->opcode
& 0x3) {
11644 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11647 #if defined(TARGET_MIPS64)
11648 check_mips_64(ctx
);
11649 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11651 generate_exception_end(ctx
, EXCP_RI
);
11655 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11658 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11662 #if defined(TARGET_MIPS64)
11664 check_insn(ctx
, ISA_MIPS3
);
11665 check_mips_64(ctx
);
11666 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11670 imm
= ctx
->opcode
& 0xf;
11671 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11672 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11673 imm
= (int16_t) (imm
<< 1) >> 1;
11674 if ((ctx
->opcode
>> 4) & 0x1) {
11675 #if defined(TARGET_MIPS64)
11676 check_mips_64(ctx
);
11677 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11679 generate_exception_end(ctx
, EXCP_RI
);
11682 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11685 case M16_OPC_ADDIU8
:
11686 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11689 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11691 case M16_OPC_SLTIU
:
11692 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11697 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11700 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11703 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11706 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11709 check_insn(ctx
, ISA_MIPS32
);
11711 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11712 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11713 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11714 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11715 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11716 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11717 | (ctx
->opcode
& 0xf)) << 3;
11719 if (ctx
->opcode
& (1 << 7)) {
11720 gen_mips16_save(ctx
, xsregs
, aregs
,
11721 do_ra
, do_s0
, do_s1
,
11724 gen_mips16_restore(ctx
, xsregs
, aregs
,
11725 do_ra
, do_s0
, do_s1
,
11731 generate_exception_end(ctx
, EXCP_RI
);
11736 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11739 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11741 #if defined(TARGET_MIPS64)
11743 check_insn(ctx
, ISA_MIPS3
);
11744 check_mips_64(ctx
);
11745 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11749 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11752 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11755 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11758 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11761 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11764 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11767 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11769 #if defined(TARGET_MIPS64)
11771 check_insn(ctx
, ISA_MIPS3
);
11772 check_mips_64(ctx
);
11773 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11777 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11780 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11783 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11786 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11788 #if defined(TARGET_MIPS64)
11790 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11794 generate_exception_end(ctx
, EXCP_RI
);
11801 static inline bool is_uhi(int sdbbp_code
)
11803 #ifdef CONFIG_USER_ONLY
11806 return semihosting_enabled() && sdbbp_code
== 1;
11810 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11814 int op
, cnvt_op
, op1
, offset
;
11818 op
= (ctx
->opcode
>> 11) & 0x1f;
11819 sa
= (ctx
->opcode
>> 2) & 0x7;
11820 sa
= sa
== 0 ? 8 : sa
;
11821 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11822 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11823 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11824 op1
= offset
= ctx
->opcode
& 0x1f;
11829 case M16_OPC_ADDIUSP
:
11831 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11833 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11836 case M16_OPC_ADDIUPC
:
11837 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11840 offset
= (ctx
->opcode
& 0x7ff) << 1;
11841 offset
= (int16_t)(offset
<< 4) >> 4;
11842 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11843 /* No delay slot, so just process as a normal instruction */
11846 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
11847 offset
= (((ctx
->opcode
& 0x1f) << 21)
11848 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11850 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11851 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11855 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11856 ((int8_t)ctx
->opcode
) << 1, 0);
11857 /* No delay slot, so just process as a normal instruction */
11859 case M16_OPC_BNEQZ
:
11860 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11861 ((int8_t)ctx
->opcode
) << 1, 0);
11862 /* No delay slot, so just process as a normal instruction */
11864 case M16_OPC_SHIFT
:
11865 switch (ctx
->opcode
& 0x3) {
11867 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11870 #if defined(TARGET_MIPS64)
11871 check_insn(ctx
, ISA_MIPS3
);
11872 check_mips_64(ctx
);
11873 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11875 generate_exception_end(ctx
, EXCP_RI
);
11879 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11882 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11886 #if defined(TARGET_MIPS64)
11888 check_insn(ctx
, ISA_MIPS3
);
11889 check_mips_64(ctx
);
11890 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11895 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11897 if ((ctx
->opcode
>> 4) & 1) {
11898 #if defined(TARGET_MIPS64)
11899 check_insn(ctx
, ISA_MIPS3
);
11900 check_mips_64(ctx
);
11901 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11903 generate_exception_end(ctx
, EXCP_RI
);
11906 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11910 case M16_OPC_ADDIU8
:
11912 int16_t imm
= (int8_t) ctx
->opcode
;
11914 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11919 int16_t imm
= (uint8_t) ctx
->opcode
;
11920 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11923 case M16_OPC_SLTIU
:
11925 int16_t imm
= (uint8_t) ctx
->opcode
;
11926 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11933 funct
= (ctx
->opcode
>> 8) & 0x7;
11936 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11937 ((int8_t)ctx
->opcode
) << 1, 0);
11940 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11941 ((int8_t)ctx
->opcode
) << 1, 0);
11944 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11947 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11948 ((int8_t)ctx
->opcode
) << 3);
11951 check_insn(ctx
, ISA_MIPS32
);
11953 int do_ra
= ctx
->opcode
& (1 << 6);
11954 int do_s0
= ctx
->opcode
& (1 << 5);
11955 int do_s1
= ctx
->opcode
& (1 << 4);
11956 int framesize
= ctx
->opcode
& 0xf;
11958 if (framesize
== 0) {
11961 framesize
= framesize
<< 3;
11964 if (ctx
->opcode
& (1 << 7)) {
11965 gen_mips16_save(ctx
, 0, 0,
11966 do_ra
, do_s0
, do_s1
, framesize
);
11968 gen_mips16_restore(ctx
, 0, 0,
11969 do_ra
, do_s0
, do_s1
, framesize
);
11975 int rz
= xlat(ctx
->opcode
& 0x7);
11977 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11978 ((ctx
->opcode
>> 5) & 0x7);
11979 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11983 reg32
= ctx
->opcode
& 0x1f;
11984 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11987 generate_exception_end(ctx
, EXCP_RI
);
11994 int16_t imm
= (uint8_t) ctx
->opcode
;
11996 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
12001 int16_t imm
= (uint8_t) ctx
->opcode
;
12002 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
12005 #if defined(TARGET_MIPS64)
12007 check_insn(ctx
, ISA_MIPS3
);
12008 check_mips_64(ctx
);
12009 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
12013 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
12016 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12019 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12022 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12025 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12028 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12031 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12033 #if defined (TARGET_MIPS64)
12035 check_insn(ctx
, ISA_MIPS3
);
12036 check_mips_64(ctx
);
12037 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12041 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12044 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12047 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12050 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12054 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12057 switch (ctx
->opcode
& 0x3) {
12059 mips32_op
= OPC_ADDU
;
12062 mips32_op
= OPC_SUBU
;
12064 #if defined(TARGET_MIPS64)
12066 mips32_op
= OPC_DADDU
;
12067 check_insn(ctx
, ISA_MIPS3
);
12068 check_mips_64(ctx
);
12071 mips32_op
= OPC_DSUBU
;
12072 check_insn(ctx
, ISA_MIPS3
);
12073 check_mips_64(ctx
);
12077 generate_exception_end(ctx
, EXCP_RI
);
12081 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12090 int nd
= (ctx
->opcode
>> 7) & 0x1;
12091 int link
= (ctx
->opcode
>> 6) & 0x1;
12092 int ra
= (ctx
->opcode
>> 5) & 0x1;
12095 check_insn(ctx
, ISA_MIPS32
);
12104 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12109 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12110 gen_helper_do_semihosting(cpu_env
);
12112 /* XXX: not clear which exception should be raised
12113 * when in debug mode...
12115 check_insn(ctx
, ISA_MIPS32
);
12116 generate_exception_end(ctx
, EXCP_DBp
);
12120 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12123 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12126 generate_exception_end(ctx
, EXCP_BREAK
);
12129 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12132 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12135 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12137 #if defined (TARGET_MIPS64)
12139 check_insn(ctx
, ISA_MIPS3
);
12140 check_mips_64(ctx
);
12141 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12145 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12148 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12151 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12154 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12157 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12160 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12163 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12166 check_insn(ctx
, ISA_MIPS32
);
12168 case RR_RY_CNVT_ZEB
:
12169 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12171 case RR_RY_CNVT_ZEH
:
12172 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12174 case RR_RY_CNVT_SEB
:
12175 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12177 case RR_RY_CNVT_SEH
:
12178 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12180 #if defined (TARGET_MIPS64)
12181 case RR_RY_CNVT_ZEW
:
12182 check_insn(ctx
, ISA_MIPS64
);
12183 check_mips_64(ctx
);
12184 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12186 case RR_RY_CNVT_SEW
:
12187 check_insn(ctx
, ISA_MIPS64
);
12188 check_mips_64(ctx
);
12189 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12193 generate_exception_end(ctx
, EXCP_RI
);
12198 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12200 #if defined (TARGET_MIPS64)
12202 check_insn(ctx
, ISA_MIPS3
);
12203 check_mips_64(ctx
);
12204 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12207 check_insn(ctx
, ISA_MIPS3
);
12208 check_mips_64(ctx
);
12209 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12212 check_insn(ctx
, ISA_MIPS3
);
12213 check_mips_64(ctx
);
12214 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12217 check_insn(ctx
, ISA_MIPS3
);
12218 check_mips_64(ctx
);
12219 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12223 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12226 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12229 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12232 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12234 #if defined (TARGET_MIPS64)
12236 check_insn(ctx
, ISA_MIPS3
);
12237 check_mips_64(ctx
);
12238 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12241 check_insn(ctx
, ISA_MIPS3
);
12242 check_mips_64(ctx
);
12243 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12246 check_insn(ctx
, ISA_MIPS3
);
12247 check_mips_64(ctx
);
12248 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12251 check_insn(ctx
, ISA_MIPS3
);
12252 check_mips_64(ctx
);
12253 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12257 generate_exception_end(ctx
, EXCP_RI
);
12261 case M16_OPC_EXTEND
:
12262 decode_extended_mips16_opc(env
, ctx
);
12265 #if defined(TARGET_MIPS64)
12267 funct
= (ctx
->opcode
>> 8) & 0x7;
12268 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12272 generate_exception_end(ctx
, EXCP_RI
);
12279 /* microMIPS extension to MIPS32/MIPS64 */
12282 * microMIPS32/microMIPS64 major opcodes
12284 * 1. MIPS Architecture for Programmers Volume II-B:
12285 * The microMIPS32 Instruction Set (Revision 3.05)
12287 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12289 * 2. MIPS Architecture For Programmers Volume II-A:
12290 * The MIPS64 Instruction Set (Revision 3.51)
12320 POOL32S
= 0x16, /* MIPS64 */
12321 DADDIU32
= 0x17, /* MIPS64 */
12350 /* 0x29 is reserved */
12363 /* 0x31 is reserved */
12376 SD32
= 0x36, /* MIPS64 */
12377 LD32
= 0x37, /* MIPS64 */
12379 /* 0x39 is reserved */
12395 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12405 /* POOL32A encoding of minor opcode field */
12408 /* These opcodes are distinguished only by bits 9..6; those bits are
12409 * what are recorded below. */
12446 /* The following can be distinguished by their lower 6 bits. */
12456 /* POOL32AXF encoding of minor opcode field extension */
12459 * 1. MIPS Architecture for Programmers Volume II-B:
12460 * The microMIPS32 Instruction Set (Revision 3.05)
12462 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12464 * 2. MIPS Architecture for Programmers VolumeIV-e:
12465 * The MIPS DSP Application-Specific Extension
12466 * to the microMIPS32 Architecture (Revision 2.34)
12468 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12483 /* begin of microMIPS32 DSP */
12485 /* bits 13..12 for 0x01 */
12491 /* bits 13..12 for 0x2a */
12497 /* bits 13..12 for 0x32 */
12501 /* end of microMIPS32 DSP */
12503 /* bits 15..12 for 0x2c */
12520 /* bits 15..12 for 0x34 */
12528 /* bits 15..12 for 0x3c */
12530 JR
= 0x0, /* alias */
12538 /* bits 15..12 for 0x05 */
12542 /* bits 15..12 for 0x0d */
12554 /* bits 15..12 for 0x15 */
12560 /* bits 15..12 for 0x1d */
12564 /* bits 15..12 for 0x2d */
12569 /* bits 15..12 for 0x35 */
12576 /* POOL32B encoding of minor opcode field (bits 15..12) */
12592 /* POOL32C encoding of minor opcode field (bits 15..12) */
12613 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
12626 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
12639 /* POOL32F encoding of minor opcode field (bits 5..0) */
12642 /* These are the bit 7..6 values */
12651 /* These are the bit 8..6 values */
12676 MOVZ_FMT_05
= 0x05,
12710 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12717 /* POOL32Fxf encoding of minor opcode extension field */
12755 /* POOL32I encoding of minor opcode field (bits 25..21) */
12785 /* These overlap and are distinguished by bit16 of the instruction */
12794 /* POOL16A encoding of minor opcode field */
12801 /* POOL16B encoding of minor opcode field */
12808 /* POOL16C encoding of minor opcode field */
12828 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12848 /* POOL16D encoding of minor opcode field */
12855 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS/MIPS16 register field onto the corresponding
 * MIPS32 GPR number.  Encodings 0 and 1 select $16/$17 (s0/s1); the
 * rest select $2..$7 directly.
 */
static int mmreg (int r)
{
    static const int reg_map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
12869 /* Used for 16-bit store instructions. */
/*
 * Register-field mapping used by 16-bit store instructions: identical to
 * mmreg() except that encoding 0 selects $0 (zero) instead of $16.
 */
static int mmreg2 (int r)
{
    static const int reg_map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
12877 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12878 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12879 #define uMIPS_RS2(op) uMIPS_RS(op)
12880 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12881 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12882 #define uMIPS_RS5(op) (op & 0x1f)
12884 /* Signed immediate */
12885 #define SIMM(op, start, width) \
12886 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12889 /* Zero-extended immediate */
12890 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12892 static void gen_addiur1sp(DisasContext
*ctx
)
12894 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12896 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12899 static void gen_addiur2(DisasContext
*ctx
)
12901 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12902 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12903 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12905 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12908 static void gen_addiusp(DisasContext
*ctx
)
12910 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12913 if (encoded
<= 1) {
12914 decoded
= 256 + encoded
;
12915 } else if (encoded
<= 255) {
12917 } else if (encoded
<= 509) {
12918 decoded
= encoded
- 512;
12920 decoded
= encoded
- 768;
12923 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12926 static void gen_addius5(DisasContext
*ctx
)
12928 int imm
= SIMM(ctx
->opcode
, 1, 4);
12929 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12931 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12934 static void gen_andi16(DisasContext
*ctx
)
12936 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12937 31, 32, 63, 64, 255, 32768, 65535 };
12938 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12939 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12940 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12942 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12945 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12946 int base
, int16_t offset
)
12951 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12952 generate_exception_end(ctx
, EXCP_RI
);
12956 t0
= tcg_temp_new();
12958 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12960 t1
= tcg_const_tl(reglist
);
12961 t2
= tcg_const_i32(ctx
->mem_idx
);
12963 save_cpu_state(ctx
, 1);
12966 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12969 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12971 #ifdef TARGET_MIPS64
12973 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12976 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12982 tcg_temp_free_i32(t2
);
12986 static void gen_pool16c_insn(DisasContext
*ctx
)
12988 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12989 int rs
= mmreg(ctx
->opcode
& 0x7);
12991 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12996 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
13002 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
13008 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
13014 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13021 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13022 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13024 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13033 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13034 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13036 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13043 int reg
= ctx
->opcode
& 0x1f;
13045 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13051 int reg
= ctx
->opcode
& 0x1f;
13052 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13053 /* Let normal delay slot handling in our caller take us
13054 to the branch target. */
13059 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13060 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13064 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13065 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13069 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13073 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13076 generate_exception_end(ctx
, EXCP_BREAK
);
13079 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13080 gen_helper_do_semihosting(cpu_env
);
13082 /* XXX: not clear which exception should be raised
13083 * when in debug mode...
13085 check_insn(ctx
, ISA_MIPS32
);
13086 generate_exception_end(ctx
, EXCP_DBp
);
13089 case JRADDIUSP
+ 0:
13090 case JRADDIUSP
+ 1:
13092 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13093 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13094 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13095 /* Let normal delay slot handling in our caller take us
13096 to the branch target. */
13100 generate_exception_end(ctx
, EXCP_RI
);
13105 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
13108 int rd
, rs
, re
, rt
;
13109 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
13110 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
13111 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
13112 rd
= rd_enc
[enc_dest
];
13113 re
= re_enc
[enc_dest
];
13114 rs
= rs_rt_enc
[enc_rs
];
13115 rt
= rs_rt_enc
[enc_rt
];
13117 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
13119 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
13122 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
13124 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
13128 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13130 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13131 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13133 switch (ctx
->opcode
& 0xf) {
13135 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13138 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13142 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13143 int offset
= extract32(ctx
->opcode
, 4, 4);
13144 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13147 case R6_JRC16
: /* JRCADDIUSP */
13148 if ((ctx
->opcode
>> 4) & 1) {
13150 int imm
= extract32(ctx
->opcode
, 5, 5);
13151 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13152 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13155 int rs
= extract32(ctx
->opcode
, 5, 5);
13156 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13159 case MOVEP
... MOVEP_07
:
13160 case MOVEP_0C
... MOVEP_0F
:
13162 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13163 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13164 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13165 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13169 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13172 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13176 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13177 int offset
= extract32(ctx
->opcode
, 4, 4);
13178 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13181 case JALRC16
: /* BREAK16, SDBBP16 */
13182 switch (ctx
->opcode
& 0x3f) {
13184 case JALRC16
+ 0x20:
13186 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13191 generate_exception(ctx
, EXCP_BREAK
);
13195 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13196 gen_helper_do_semihosting(cpu_env
);
13198 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13199 generate_exception(ctx
, EXCP_RI
);
13201 generate_exception(ctx
, EXCP_DBp
);
13208 generate_exception(ctx
, EXCP_RI
);
13213 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13215 TCGv t0
= tcg_temp_new();
13216 TCGv t1
= tcg_temp_new();
13218 gen_load_gpr(t0
, base
);
13221 gen_load_gpr(t1
, index
);
13222 tcg_gen_shli_tl(t1
, t1
, 2);
13223 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13226 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13227 gen_store_gpr(t1
, rd
);
13233 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13234 int base
, int16_t offset
)
13238 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13239 generate_exception_end(ctx
, EXCP_RI
);
13243 t0
= tcg_temp_new();
13244 t1
= tcg_temp_new();
13246 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13251 generate_exception_end(ctx
, EXCP_RI
);
13254 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13255 gen_store_gpr(t1
, rd
);
13256 tcg_gen_movi_tl(t1
, 4);
13257 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13258 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13259 gen_store_gpr(t1
, rd
+1);
13262 gen_load_gpr(t1
, rd
);
13263 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13264 tcg_gen_movi_tl(t1
, 4);
13265 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13266 gen_load_gpr(t1
, rd
+1);
13267 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13269 #ifdef TARGET_MIPS64
13272 generate_exception_end(ctx
, EXCP_RI
);
13275 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13276 gen_store_gpr(t1
, rd
);
13277 tcg_gen_movi_tl(t1
, 8);
13278 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13279 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13280 gen_store_gpr(t1
, rd
+1);
13283 gen_load_gpr(t1
, rd
);
13284 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13285 tcg_gen_movi_tl(t1
, 8);
13286 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13287 gen_load_gpr(t1
, rd
+1);
13288 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13296 static void gen_sync(int stype
)
13298 TCGBar tcg_mo
= TCG_BAR_SC
;
13301 case 0x4: /* SYNC_WMB */
13302 tcg_mo
|= TCG_MO_ST_ST
;
13304 case 0x10: /* SYNC_MB */
13305 tcg_mo
|= TCG_MO_ALL
;
13307 case 0x11: /* SYNC_ACQUIRE */
13308 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13310 case 0x12: /* SYNC_RELEASE */
13311 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13313 case 0x13: /* SYNC_RMB */
13314 tcg_mo
|= TCG_MO_LD_LD
;
13317 tcg_mo
|= TCG_MO_ALL
;
13321 tcg_gen_mb(tcg_mo
);
13324 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13326 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13327 int minor
= (ctx
->opcode
>> 12) & 0xf;
13328 uint32_t mips32_op
;
13330 switch (extension
) {
13332 mips32_op
= OPC_TEQ
;
13335 mips32_op
= OPC_TGE
;
13338 mips32_op
= OPC_TGEU
;
13341 mips32_op
= OPC_TLT
;
13344 mips32_op
= OPC_TLTU
;
13347 mips32_op
= OPC_TNE
;
13349 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13351 #ifndef CONFIG_USER_ONLY
13354 check_cp0_enabled(ctx
);
13356 /* Treat as NOP. */
13359 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13363 check_cp0_enabled(ctx
);
13365 TCGv t0
= tcg_temp_new();
13367 gen_load_gpr(t0
, rt
);
13368 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13374 switch (minor
& 3) {
13376 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13379 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13382 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13385 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13388 goto pool32axf_invalid
;
13392 switch (minor
& 3) {
13394 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13397 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13400 goto pool32axf_invalid
;
13406 check_insn(ctx
, ISA_MIPS32R6
);
13407 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13410 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13413 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13416 mips32_op
= OPC_CLO
;
13419 mips32_op
= OPC_CLZ
;
13421 check_insn(ctx
, ISA_MIPS32
);
13422 gen_cl(ctx
, mips32_op
, rt
, rs
);
13425 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13426 gen_rdhwr(ctx
, rt
, rs
, 0);
13429 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13433 mips32_op
= OPC_MULT
;
13436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13437 mips32_op
= OPC_MULTU
;
13440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13441 mips32_op
= OPC_DIV
;
13444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13445 mips32_op
= OPC_DIVU
;
13448 check_insn(ctx
, ISA_MIPS32
);
13449 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13453 mips32_op
= OPC_MADD
;
13456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13457 mips32_op
= OPC_MADDU
;
13460 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13461 mips32_op
= OPC_MSUB
;
13464 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13465 mips32_op
= OPC_MSUBU
;
13467 check_insn(ctx
, ISA_MIPS32
);
13468 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13471 goto pool32axf_invalid
;
13482 generate_exception_err(ctx
, EXCP_CpU
, 2);
13485 goto pool32axf_invalid
;
13490 case JALR
: /* JALRC */
13491 case JALR_HB
: /* JALRC_HB */
13492 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13493 /* JALRC, JALRC_HB */
13494 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13496 /* JALR, JALR_HB */
13497 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13498 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13503 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13504 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13505 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13508 goto pool32axf_invalid
;
13514 check_cp0_enabled(ctx
);
13515 check_insn(ctx
, ISA_MIPS32R2
);
13516 gen_load_srsgpr(rs
, rt
);
13519 check_cp0_enabled(ctx
);
13520 check_insn(ctx
, ISA_MIPS32R2
);
13521 gen_store_srsgpr(rs
, rt
);
13524 goto pool32axf_invalid
;
13527 #ifndef CONFIG_USER_ONLY
13531 mips32_op
= OPC_TLBP
;
13534 mips32_op
= OPC_TLBR
;
13537 mips32_op
= OPC_TLBWI
;
13540 mips32_op
= OPC_TLBWR
;
13543 mips32_op
= OPC_TLBINV
;
13546 mips32_op
= OPC_TLBINVF
;
13549 mips32_op
= OPC_WAIT
;
13552 mips32_op
= OPC_DERET
;
13555 mips32_op
= OPC_ERET
;
13557 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13560 goto pool32axf_invalid
;
13566 check_cp0_enabled(ctx
);
13568 TCGv t0
= tcg_temp_new();
13570 save_cpu_state(ctx
, 1);
13571 gen_helper_di(t0
, cpu_env
);
13572 gen_store_gpr(t0
, rs
);
13573 /* Stop translation as we may have switched the execution mode */
13574 ctx
->base
.is_jmp
= DISAS_STOP
;
13579 check_cp0_enabled(ctx
);
13581 TCGv t0
= tcg_temp_new();
13583 save_cpu_state(ctx
, 1);
13584 gen_helper_ei(t0
, cpu_env
);
13585 gen_store_gpr(t0
, rs
);
13586 /* DISAS_STOP isn't sufficient, we need to ensure we break out
13587 of translated code to check for pending interrupts. */
13588 gen_save_pc(ctx
->base
.pc_next
+ 4);
13589 ctx
->base
.is_jmp
= DISAS_EXIT
;
13594 goto pool32axf_invalid
;
13601 gen_sync(extract32(ctx
->opcode
, 16, 5));
13604 generate_exception_end(ctx
, EXCP_SYSCALL
);
13607 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13608 gen_helper_do_semihosting(cpu_env
);
13610 check_insn(ctx
, ISA_MIPS32
);
13611 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13612 generate_exception_end(ctx
, EXCP_RI
);
13614 generate_exception_end(ctx
, EXCP_DBp
);
13619 goto pool32axf_invalid
;
13623 switch (minor
& 3) {
13625 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13628 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13631 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13634 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13637 goto pool32axf_invalid
;
13641 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13644 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13647 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13650 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13653 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13656 goto pool32axf_invalid
;
13661 MIPS_INVAL("pool32axf");
13662 generate_exception_end(ctx
, EXCP_RI
);
13667 /* Values for microMIPS fmt field. Variable-width, depending on which
13668 formats the instruction supports. */
13687 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13689 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13690 uint32_t mips32_op
;
13692 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13693 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13694 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13696 switch (extension
) {
13697 case FLOAT_1BIT_FMT(CFC1
, 0):
13698 mips32_op
= OPC_CFC1
;
13700 case FLOAT_1BIT_FMT(CTC1
, 0):
13701 mips32_op
= OPC_CTC1
;
13703 case FLOAT_1BIT_FMT(MFC1
, 0):
13704 mips32_op
= OPC_MFC1
;
13706 case FLOAT_1BIT_FMT(MTC1
, 0):
13707 mips32_op
= OPC_MTC1
;
13709 case FLOAT_1BIT_FMT(MFHC1
, 0):
13710 mips32_op
= OPC_MFHC1
;
13712 case FLOAT_1BIT_FMT(MTHC1
, 0):
13713 mips32_op
= OPC_MTHC1
;
13715 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13718 /* Reciprocal square root */
13719 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13720 mips32_op
= OPC_RSQRT_S
;
13722 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13723 mips32_op
= OPC_RSQRT_D
;
13727 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13728 mips32_op
= OPC_SQRT_S
;
13730 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13731 mips32_op
= OPC_SQRT_D
;
13735 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13736 mips32_op
= OPC_RECIP_S
;
13738 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13739 mips32_op
= OPC_RECIP_D
;
13743 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13744 mips32_op
= OPC_FLOOR_L_S
;
13746 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13747 mips32_op
= OPC_FLOOR_L_D
;
13749 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13750 mips32_op
= OPC_FLOOR_W_S
;
13752 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13753 mips32_op
= OPC_FLOOR_W_D
;
13757 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13758 mips32_op
= OPC_CEIL_L_S
;
13760 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13761 mips32_op
= OPC_CEIL_L_D
;
13763 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13764 mips32_op
= OPC_CEIL_W_S
;
13766 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13767 mips32_op
= OPC_CEIL_W_D
;
13771 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13772 mips32_op
= OPC_TRUNC_L_S
;
13774 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13775 mips32_op
= OPC_TRUNC_L_D
;
13777 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13778 mips32_op
= OPC_TRUNC_W_S
;
13780 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13781 mips32_op
= OPC_TRUNC_W_D
;
13785 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13786 mips32_op
= OPC_ROUND_L_S
;
13788 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13789 mips32_op
= OPC_ROUND_L_D
;
13791 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13792 mips32_op
= OPC_ROUND_W_S
;
13794 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13795 mips32_op
= OPC_ROUND_W_D
;
13798 /* Integer to floating-point conversion */
13799 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13800 mips32_op
= OPC_CVT_L_S
;
13802 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13803 mips32_op
= OPC_CVT_L_D
;
13805 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13806 mips32_op
= OPC_CVT_W_S
;
13808 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13809 mips32_op
= OPC_CVT_W_D
;
13812 /* Paired-foo conversions */
13813 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13814 mips32_op
= OPC_CVT_S_PL
;
13816 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13817 mips32_op
= OPC_CVT_S_PU
;
13819 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13820 mips32_op
= OPC_CVT_PW_PS
;
13822 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13823 mips32_op
= OPC_CVT_PS_PW
;
13826 /* Floating-point moves */
13827 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13828 mips32_op
= OPC_MOV_S
;
13830 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13831 mips32_op
= OPC_MOV_D
;
13833 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13834 mips32_op
= OPC_MOV_PS
;
13837 /* Absolute value */
13838 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13839 mips32_op
= OPC_ABS_S
;
13841 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13842 mips32_op
= OPC_ABS_D
;
13844 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13845 mips32_op
= OPC_ABS_PS
;
13849 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13850 mips32_op
= OPC_NEG_S
;
13852 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13853 mips32_op
= OPC_NEG_D
;
13855 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13856 mips32_op
= OPC_NEG_PS
;
13859 /* Reciprocal square root step */
13860 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13861 mips32_op
= OPC_RSQRT1_S
;
13863 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13864 mips32_op
= OPC_RSQRT1_D
;
13866 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13867 mips32_op
= OPC_RSQRT1_PS
;
13870 /* Reciprocal step */
13871 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13872 mips32_op
= OPC_RECIP1_S
;
13874 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13875 mips32_op
= OPC_RECIP1_S
;
13877 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13878 mips32_op
= OPC_RECIP1_PS
;
13881 /* Conversions from double */
13882 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13883 mips32_op
= OPC_CVT_D_S
;
13885 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13886 mips32_op
= OPC_CVT_D_W
;
13888 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13889 mips32_op
= OPC_CVT_D_L
;
13892 /* Conversions from single */
13893 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13894 mips32_op
= OPC_CVT_S_D
;
13896 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13897 mips32_op
= OPC_CVT_S_W
;
13899 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13900 mips32_op
= OPC_CVT_S_L
;
13902 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13905 /* Conditional moves on floating-point codes */
13906 case COND_FLOAT_MOV(MOVT
, 0):
13907 case COND_FLOAT_MOV(MOVT
, 1):
13908 case COND_FLOAT_MOV(MOVT
, 2):
13909 case COND_FLOAT_MOV(MOVT
, 3):
13910 case COND_FLOAT_MOV(MOVT
, 4):
13911 case COND_FLOAT_MOV(MOVT
, 5):
13912 case COND_FLOAT_MOV(MOVT
, 6):
13913 case COND_FLOAT_MOV(MOVT
, 7):
13914 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13915 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13917 case COND_FLOAT_MOV(MOVF
, 0):
13918 case COND_FLOAT_MOV(MOVF
, 1):
13919 case COND_FLOAT_MOV(MOVF
, 2):
13920 case COND_FLOAT_MOV(MOVF
, 3):
13921 case COND_FLOAT_MOV(MOVF
, 4):
13922 case COND_FLOAT_MOV(MOVF
, 5):
13923 case COND_FLOAT_MOV(MOVF
, 6):
13924 case COND_FLOAT_MOV(MOVF
, 7):
13925 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13926 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13929 MIPS_INVAL("pool32fxf");
13930 generate_exception_end(ctx
, EXCP_RI
);
13935 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13939 int rt
, rs
, rd
, rr
;
13941 uint32_t op
, minor
, minor2
, mips32_op
;
13942 uint32_t cond
, fmt
, cc
;
13944 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13945 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13947 rt
= (ctx
->opcode
>> 21) & 0x1f;
13948 rs
= (ctx
->opcode
>> 16) & 0x1f;
13949 rd
= (ctx
->opcode
>> 11) & 0x1f;
13950 rr
= (ctx
->opcode
>> 6) & 0x1f;
13951 imm
= (int16_t) ctx
->opcode
;
13953 op
= (ctx
->opcode
>> 26) & 0x3f;
13956 minor
= ctx
->opcode
& 0x3f;
13959 minor
= (ctx
->opcode
>> 6) & 0xf;
13962 mips32_op
= OPC_SLL
;
13965 mips32_op
= OPC_SRA
;
13968 mips32_op
= OPC_SRL
;
13971 mips32_op
= OPC_ROTR
;
13973 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13976 check_insn(ctx
, ISA_MIPS32R6
);
13977 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13980 check_insn(ctx
, ISA_MIPS32R6
);
13981 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13984 check_insn(ctx
, ISA_MIPS32R6
);
13985 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13988 goto pool32a_invalid
;
13992 minor
= (ctx
->opcode
>> 6) & 0xf;
13996 mips32_op
= OPC_ADD
;
13999 mips32_op
= OPC_ADDU
;
14002 mips32_op
= OPC_SUB
;
14005 mips32_op
= OPC_SUBU
;
14008 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14009 mips32_op
= OPC_MUL
;
14011 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
14015 mips32_op
= OPC_SLLV
;
14018 mips32_op
= OPC_SRLV
;
14021 mips32_op
= OPC_SRAV
;
14024 mips32_op
= OPC_ROTRV
;
14026 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14028 /* Logical operations */
14030 mips32_op
= OPC_AND
;
14033 mips32_op
= OPC_OR
;
14036 mips32_op
= OPC_NOR
;
14039 mips32_op
= OPC_XOR
;
14041 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14043 /* Set less than */
14045 mips32_op
= OPC_SLT
;
14048 mips32_op
= OPC_SLTU
;
14050 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14053 goto pool32a_invalid
;
14057 minor
= (ctx
->opcode
>> 6) & 0xf;
14059 /* Conditional moves */
14060 case MOVN
: /* MUL */
14061 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14063 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14066 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14069 case MOVZ
: /* MUH */
14070 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14072 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14075 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14079 check_insn(ctx
, ISA_MIPS32R6
);
14080 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14083 check_insn(ctx
, ISA_MIPS32R6
);
14084 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14086 case LWXS
: /* DIV */
14087 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14089 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14092 gen_ldxs(ctx
, rs
, rt
, rd
);
14096 check_insn(ctx
, ISA_MIPS32R6
);
14097 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14100 check_insn(ctx
, ISA_MIPS32R6
);
14101 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14104 check_insn(ctx
, ISA_MIPS32R6
);
14105 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14108 goto pool32a_invalid
;
14112 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14115 check_insn(ctx
, ISA_MIPS32R6
);
14116 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14117 extract32(ctx
->opcode
, 9, 2));
14120 check_insn(ctx
, ISA_MIPS32R6
);
14121 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14122 extract32(ctx
->opcode
, 9, 2));
14125 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14128 gen_pool32axf(env
, ctx
, rt
, rs
);
14131 generate_exception_end(ctx
, EXCP_BREAK
);
14134 check_insn(ctx
, ISA_MIPS32R6
);
14135 generate_exception_end(ctx
, EXCP_RI
);
14139 MIPS_INVAL("pool32a");
14140 generate_exception_end(ctx
, EXCP_RI
);
14145 minor
= (ctx
->opcode
>> 12) & 0xf;
14148 check_cp0_enabled(ctx
);
14149 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14150 gen_cache_operation(ctx
, rt
, rs
, imm
);
14155 /* COP2: Not implemented. */
14156 generate_exception_err(ctx
, EXCP_CpU
, 2);
14158 #ifdef TARGET_MIPS64
14161 check_insn(ctx
, ISA_MIPS3
);
14162 check_mips_64(ctx
);
14167 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14169 #ifdef TARGET_MIPS64
14172 check_insn(ctx
, ISA_MIPS3
);
14173 check_mips_64(ctx
);
14178 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14181 MIPS_INVAL("pool32b");
14182 generate_exception_end(ctx
, EXCP_RI
);
14187 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14188 minor
= ctx
->opcode
& 0x3f;
14189 check_cp1_enabled(ctx
);
14192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14193 mips32_op
= OPC_ALNV_PS
;
14196 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14197 mips32_op
= OPC_MADD_S
;
14200 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14201 mips32_op
= OPC_MADD_D
;
14204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14205 mips32_op
= OPC_MADD_PS
;
14208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14209 mips32_op
= OPC_MSUB_S
;
14212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14213 mips32_op
= OPC_MSUB_D
;
14216 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14217 mips32_op
= OPC_MSUB_PS
;
14220 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14221 mips32_op
= OPC_NMADD_S
;
14224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14225 mips32_op
= OPC_NMADD_D
;
14228 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14229 mips32_op
= OPC_NMADD_PS
;
14232 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14233 mips32_op
= OPC_NMSUB_S
;
14236 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14237 mips32_op
= OPC_NMSUB_D
;
14240 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14241 mips32_op
= OPC_NMSUB_PS
;
14243 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14245 case CABS_COND_FMT
:
14246 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14247 cond
= (ctx
->opcode
>> 6) & 0xf;
14248 cc
= (ctx
->opcode
>> 13) & 0x7;
14249 fmt
= (ctx
->opcode
>> 10) & 0x3;
14252 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14255 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14258 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14261 goto pool32f_invalid
;
14265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14266 cond
= (ctx
->opcode
>> 6) & 0xf;
14267 cc
= (ctx
->opcode
>> 13) & 0x7;
14268 fmt
= (ctx
->opcode
>> 10) & 0x3;
14271 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14274 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14277 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14280 goto pool32f_invalid
;
14284 check_insn(ctx
, ISA_MIPS32R6
);
14285 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14288 check_insn(ctx
, ISA_MIPS32R6
);
14289 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14292 gen_pool32fxf(ctx
, rt
, rs
);
14296 switch ((ctx
->opcode
>> 6) & 0x7) {
14298 mips32_op
= OPC_PLL_PS
;
14301 mips32_op
= OPC_PLU_PS
;
14304 mips32_op
= OPC_PUL_PS
;
14307 mips32_op
= OPC_PUU_PS
;
14310 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14311 mips32_op
= OPC_CVT_PS_S
;
14313 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14316 goto pool32f_invalid
;
14320 check_insn(ctx
, ISA_MIPS32R6
);
14321 switch ((ctx
->opcode
>> 9) & 0x3) {
14323 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14326 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14329 goto pool32f_invalid
;
14334 switch ((ctx
->opcode
>> 6) & 0x7) {
14336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14337 mips32_op
= OPC_LWXC1
;
14340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14341 mips32_op
= OPC_SWXC1
;
14344 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14345 mips32_op
= OPC_LDXC1
;
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 mips32_op
= OPC_SDXC1
;
14352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14353 mips32_op
= OPC_LUXC1
;
14356 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14357 mips32_op
= OPC_SUXC1
;
14359 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14362 goto pool32f_invalid
;
14366 check_insn(ctx
, ISA_MIPS32R6
);
14367 switch ((ctx
->opcode
>> 9) & 0x3) {
14369 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14372 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14375 goto pool32f_invalid
;
14380 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14381 fmt
= (ctx
->opcode
>> 9) & 0x3;
14382 switch ((ctx
->opcode
>> 6) & 0x7) {
14386 mips32_op
= OPC_RSQRT2_S
;
14389 mips32_op
= OPC_RSQRT2_D
;
14392 mips32_op
= OPC_RSQRT2_PS
;
14395 goto pool32f_invalid
;
14401 mips32_op
= OPC_RECIP2_S
;
14404 mips32_op
= OPC_RECIP2_D
;
14407 mips32_op
= OPC_RECIP2_PS
;
14410 goto pool32f_invalid
;
14414 mips32_op
= OPC_ADDR_PS
;
14417 mips32_op
= OPC_MULR_PS
;
14419 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14422 goto pool32f_invalid
;
14426 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14427 cc
= (ctx
->opcode
>> 13) & 0x7;
14428 fmt
= (ctx
->opcode
>> 9) & 0x3;
14429 switch ((ctx
->opcode
>> 6) & 0x7) {
14430 case MOVF_FMT
: /* RINT_FMT */
14431 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14435 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14438 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14441 goto pool32f_invalid
;
14447 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14450 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14454 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14457 goto pool32f_invalid
;
14461 case MOVT_FMT
: /* CLASS_FMT */
14462 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14466 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14469 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14472 goto pool32f_invalid
;
14478 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14481 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14485 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14488 goto pool32f_invalid
;
14493 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14496 goto pool32f_invalid
;
14499 #define FINSN_3ARG_SDPS(prfx) \
14500 switch ((ctx->opcode >> 8) & 0x3) { \
14502 mips32_op = OPC_##prfx##_S; \
14505 mips32_op = OPC_##prfx##_D; \
14507 case FMT_SDPS_PS: \
14509 mips32_op = OPC_##prfx##_PS; \
14512 goto pool32f_invalid; \
14515 check_insn(ctx
, ISA_MIPS32R6
);
14516 switch ((ctx
->opcode
>> 9) & 0x3) {
14518 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14521 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14524 goto pool32f_invalid
;
14528 check_insn(ctx
, ISA_MIPS32R6
);
14529 switch ((ctx
->opcode
>> 9) & 0x3) {
14531 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14534 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14537 goto pool32f_invalid
;
14541 /* regular FP ops */
14542 switch ((ctx
->opcode
>> 6) & 0x3) {
14544 FINSN_3ARG_SDPS(ADD
);
14547 FINSN_3ARG_SDPS(SUB
);
14550 FINSN_3ARG_SDPS(MUL
);
14553 fmt
= (ctx
->opcode
>> 8) & 0x3;
14555 mips32_op
= OPC_DIV_D
;
14556 } else if (fmt
== 0) {
14557 mips32_op
= OPC_DIV_S
;
14559 goto pool32f_invalid
;
14563 goto pool32f_invalid
;
14568 switch ((ctx
->opcode
>> 6) & 0x7) {
14569 case MOVN_FMT
: /* SELNEZ_FMT */
14570 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14572 switch ((ctx
->opcode
>> 9) & 0x3) {
14574 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14577 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14580 goto pool32f_invalid
;
14584 FINSN_3ARG_SDPS(MOVN
);
14588 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14589 FINSN_3ARG_SDPS(MOVN
);
14591 case MOVZ_FMT
: /* SELEQZ_FMT */
14592 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14594 switch ((ctx
->opcode
>> 9) & 0x3) {
14596 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14599 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14602 goto pool32f_invalid
;
14606 FINSN_3ARG_SDPS(MOVZ
);
14610 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14611 FINSN_3ARG_SDPS(MOVZ
);
14614 check_insn(ctx
, ISA_MIPS32R6
);
14615 switch ((ctx
->opcode
>> 9) & 0x3) {
14617 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14620 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14623 goto pool32f_invalid
;
14627 check_insn(ctx
, ISA_MIPS32R6
);
14628 switch ((ctx
->opcode
>> 9) & 0x3) {
14630 mips32_op
= OPC_MADDF_S
;
14633 mips32_op
= OPC_MADDF_D
;
14636 goto pool32f_invalid
;
14640 check_insn(ctx
, ISA_MIPS32R6
);
14641 switch ((ctx
->opcode
>> 9) & 0x3) {
14643 mips32_op
= OPC_MSUBF_S
;
14646 mips32_op
= OPC_MSUBF_D
;
14649 goto pool32f_invalid
;
14653 goto pool32f_invalid
;
14657 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14661 MIPS_INVAL("pool32f");
14662 generate_exception_end(ctx
, EXCP_RI
);
14666 generate_exception_err(ctx
, EXCP_CpU
, 1);
14670 minor
= (ctx
->opcode
>> 21) & 0x1f;
14673 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14674 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14678 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14679 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14682 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14683 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14684 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14687 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14688 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14691 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14692 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14693 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14696 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14697 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14698 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14701 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14702 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14705 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14706 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14710 case TLTI
: /* BC1EQZC */
14711 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14713 check_cp1_enabled(ctx
);
14714 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14717 mips32_op
= OPC_TLTI
;
14721 case TGEI
: /* BC1NEZC */
14722 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14724 check_cp1_enabled(ctx
);
14725 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14728 mips32_op
= OPC_TGEI
;
14733 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14734 mips32_op
= OPC_TLTIU
;
14737 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14738 mips32_op
= OPC_TGEIU
;
14740 case TNEI
: /* SYNCI */
14741 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14743 /* Break the TB to be able to sync copied instructions
14745 ctx
->base
.is_jmp
= DISAS_STOP
;
14748 mips32_op
= OPC_TNEI
;
14753 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14754 mips32_op
= OPC_TEQI
;
14756 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14761 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14762 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14763 4, rs
, 0, imm
<< 1, 0);
14764 /* Compact branches don't have a delay slot, so just let
14765 the normal delay slot handling take us to the branch
14769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14770 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14774 /* Break the TB to be able to sync copied instructions
14776 ctx
->base
.is_jmp
= DISAS_STOP
;
14780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14781 /* COP2: Not implemented. */
14782 generate_exception_err(ctx
, EXCP_CpU
, 2);
14785 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14786 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14789 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14790 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14793 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14794 mips32_op
= OPC_BC1FANY4
;
14797 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14798 mips32_op
= OPC_BC1TANY4
;
14801 check_insn(ctx
, ASE_MIPS3D
);
14804 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14805 check_cp1_enabled(ctx
);
14806 gen_compute_branch1(ctx
, mips32_op
,
14807 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14809 generate_exception_err(ctx
, EXCP_CpU
, 1);
14814 /* MIPS DSP: not implemented */
14817 MIPS_INVAL("pool32i");
14818 generate_exception_end(ctx
, EXCP_RI
);
14823 minor
= (ctx
->opcode
>> 12) & 0xf;
14824 offset
= sextract32(ctx
->opcode
, 0,
14825 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14828 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14829 mips32_op
= OPC_LWL
;
14832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14833 mips32_op
= OPC_SWL
;
14836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14837 mips32_op
= OPC_LWR
;
14840 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14841 mips32_op
= OPC_SWR
;
14843 #if defined(TARGET_MIPS64)
14845 check_insn(ctx
, ISA_MIPS3
);
14846 check_mips_64(ctx
);
14847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14848 mips32_op
= OPC_LDL
;
14851 check_insn(ctx
, ISA_MIPS3
);
14852 check_mips_64(ctx
);
14853 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14854 mips32_op
= OPC_SDL
;
14857 check_insn(ctx
, ISA_MIPS3
);
14858 check_mips_64(ctx
);
14859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14860 mips32_op
= OPC_LDR
;
14863 check_insn(ctx
, ISA_MIPS3
);
14864 check_mips_64(ctx
);
14865 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14866 mips32_op
= OPC_SDR
;
14869 check_insn(ctx
, ISA_MIPS3
);
14870 check_mips_64(ctx
);
14871 mips32_op
= OPC_LWU
;
14874 check_insn(ctx
, ISA_MIPS3
);
14875 check_mips_64(ctx
);
14876 mips32_op
= OPC_LLD
;
14880 mips32_op
= OPC_LL
;
14883 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14886 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
14889 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14891 #if defined(TARGET_MIPS64)
14893 check_insn(ctx
, ISA_MIPS3
);
14894 check_mips_64(ctx
);
14895 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14900 MIPS_INVAL("pool32c ld-eva");
14901 generate_exception_end(ctx
, EXCP_RI
);
14904 check_cp0_enabled(ctx
);
14906 minor2
= (ctx
->opcode
>> 9) & 0x7;
14907 offset
= sextract32(ctx
->opcode
, 0, 9);
14910 mips32_op
= OPC_LBUE
;
14913 mips32_op
= OPC_LHUE
;
14916 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14917 mips32_op
= OPC_LWLE
;
14920 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14921 mips32_op
= OPC_LWRE
;
14924 mips32_op
= OPC_LBE
;
14927 mips32_op
= OPC_LHE
;
14930 mips32_op
= OPC_LLE
;
14933 mips32_op
= OPC_LWE
;
14939 MIPS_INVAL("pool32c st-eva");
14940 generate_exception_end(ctx
, EXCP_RI
);
14943 check_cp0_enabled(ctx
);
14945 minor2
= (ctx
->opcode
>> 9) & 0x7;
14946 offset
= sextract32(ctx
->opcode
, 0, 9);
14949 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14950 mips32_op
= OPC_SWLE
;
14953 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14954 mips32_op
= OPC_SWRE
;
14957 /* Treat as no-op */
14958 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14959 /* hint codes 24-31 are reserved and signal RI */
14960 generate_exception(ctx
, EXCP_RI
);
14964 /* Treat as no-op */
14965 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14966 gen_cache_operation(ctx
, rt
, rs
, offset
);
14970 mips32_op
= OPC_SBE
;
14973 mips32_op
= OPC_SHE
;
14976 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
14979 mips32_op
= OPC_SWE
;
14984 /* Treat as no-op */
14985 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14986 /* hint codes 24-31 are reserved and signal RI */
14987 generate_exception(ctx
, EXCP_RI
);
14991 MIPS_INVAL("pool32c");
14992 generate_exception_end(ctx
, EXCP_RI
);
14996 case ADDI32
: /* AUI, LUI */
14997 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14999 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
15002 mips32_op
= OPC_ADDI
;
15007 mips32_op
= OPC_ADDIU
;
15009 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15012 /* Logical operations */
15014 mips32_op
= OPC_ORI
;
15017 mips32_op
= OPC_XORI
;
15020 mips32_op
= OPC_ANDI
;
15022 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15025 /* Set less than immediate */
15027 mips32_op
= OPC_SLTI
;
15030 mips32_op
= OPC_SLTIU
;
15032 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15036 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15037 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15038 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15040 case JALS32
: /* BOVC, BEQC, BEQZALC */
15041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15044 mips32_op
= OPC_BOVC
;
15045 } else if (rs
< rt
&& rs
== 0) {
15047 mips32_op
= OPC_BEQZALC
;
15050 mips32_op
= OPC_BEQC
;
15052 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15055 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15056 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15057 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15060 case BEQ32
: /* BC */
15061 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15063 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15064 sextract32(ctx
->opcode
<< 1, 0, 27));
15067 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15070 case BNE32
: /* BALC */
15071 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15073 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15074 sextract32(ctx
->opcode
<< 1, 0, 27));
15077 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15080 case J32
: /* BGTZC, BLTZC, BLTC */
15081 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15082 if (rs
== 0 && rt
!= 0) {
15084 mips32_op
= OPC_BGTZC
;
15085 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15087 mips32_op
= OPC_BLTZC
;
15090 mips32_op
= OPC_BLTC
;
15092 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15095 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15096 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15099 case JAL32
: /* BLEZC, BGEZC, BGEC */
15100 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15101 if (rs
== 0 && rt
!= 0) {
15103 mips32_op
= OPC_BLEZC
;
15104 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15106 mips32_op
= OPC_BGEZC
;
15109 mips32_op
= OPC_BGEC
;
15111 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15114 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15115 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15116 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15119 /* Floating point (COP1) */
15121 mips32_op
= OPC_LWC1
;
15124 mips32_op
= OPC_LDC1
;
15127 mips32_op
= OPC_SWC1
;
15130 mips32_op
= OPC_SDC1
;
15132 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15134 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15135 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15136 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15137 switch ((ctx
->opcode
>> 16) & 0x1f) {
15138 case ADDIUPC_00
... ADDIUPC_07
:
15139 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15142 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
15145 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
15147 case LWPC_08
... LWPC_0F
:
15148 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
15151 generate_exception(ctx
, EXCP_RI
);
15156 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15157 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15159 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15162 case BNVC
: /* BNEC, BNEZALC */
15163 check_insn(ctx
, ISA_MIPS32R6
);
15166 mips32_op
= OPC_BNVC
;
15167 } else if (rs
< rt
&& rs
== 0) {
15169 mips32_op
= OPC_BNEZALC
;
15172 mips32_op
= OPC_BNEC
;
15174 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15176 case R6_BNEZC
: /* JIALC */
15177 check_insn(ctx
, ISA_MIPS32R6
);
15180 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15181 sextract32(ctx
->opcode
<< 1, 0, 22));
15184 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15187 case R6_BEQZC
: /* JIC */
15188 check_insn(ctx
, ISA_MIPS32R6
);
15191 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15192 sextract32(ctx
->opcode
<< 1, 0, 22));
15195 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15198 case BLEZALC
: /* BGEZALC, BGEUC */
15199 check_insn(ctx
, ISA_MIPS32R6
);
15200 if (rs
== 0 && rt
!= 0) {
15202 mips32_op
= OPC_BLEZALC
;
15203 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15205 mips32_op
= OPC_BGEZALC
;
15208 mips32_op
= OPC_BGEUC
;
15210 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15212 case BGTZALC
: /* BLTZALC, BLTUC */
15213 check_insn(ctx
, ISA_MIPS32R6
);
15214 if (rs
== 0 && rt
!= 0) {
15216 mips32_op
= OPC_BGTZALC
;
15217 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15219 mips32_op
= OPC_BLTZALC
;
15222 mips32_op
= OPC_BLTUC
;
15224 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15226 /* Loads and stores */
15228 mips32_op
= OPC_LB
;
15231 mips32_op
= OPC_LBU
;
15234 mips32_op
= OPC_LH
;
15237 mips32_op
= OPC_LHU
;
15240 mips32_op
= OPC_LW
;
15242 #ifdef TARGET_MIPS64
15244 check_insn(ctx
, ISA_MIPS3
);
15245 check_mips_64(ctx
);
15246 mips32_op
= OPC_LD
;
15249 check_insn(ctx
, ISA_MIPS3
);
15250 check_mips_64(ctx
);
15251 mips32_op
= OPC_SD
;
15255 mips32_op
= OPC_SB
;
15258 mips32_op
= OPC_SH
;
15261 mips32_op
= OPC_SW
;
15264 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15267 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15270 generate_exception_end(ctx
, EXCP_RI
);
15275 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15279 /* make sure instructions are on a halfword boundary */
15280 if (ctx
->base
.pc_next
& 0x1) {
15281 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15282 generate_exception_end(ctx
, EXCP_AdEL
);
15286 op
= (ctx
->opcode
>> 10) & 0x3f;
15287 /* Enforce properly-sized instructions in a delay slot */
15288 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15289 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15291 /* POOL32A, POOL32B, POOL32I, POOL32C */
15293 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15295 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15297 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15299 /* LB32, LH32, LWC132, LDC132, LW32 */
15300 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15301 generate_exception_end(ctx
, EXCP_RI
);
15306 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15308 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15310 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15311 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15312 generate_exception_end(ctx
, EXCP_RI
);
15322 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15323 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15324 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15327 switch (ctx
->opcode
& 0x1) {
15335 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15336 /* In the Release 6 the register number location in
15337 * the instruction encoding has changed.
15339 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15341 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15347 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15348 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15349 int amount
= (ctx
->opcode
>> 1) & 0x7;
15351 amount
= amount
== 0 ? 8 : amount
;
15353 switch (ctx
->opcode
& 0x1) {
15362 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15366 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15367 gen_pool16c_r6_insn(ctx
);
15369 gen_pool16c_insn(ctx
);
15374 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15375 int rb
= 28; /* GP */
15376 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15378 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15382 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15383 if (ctx
->opcode
& 1) {
15384 generate_exception_end(ctx
, EXCP_RI
);
15387 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15388 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15389 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15390 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15395 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15396 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15397 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15398 offset
= (offset
== 0xf ? -1 : offset
);
15400 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15405 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15406 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15407 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15409 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15414 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15415 int rb
= 29; /* SP */
15416 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15418 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15423 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15424 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15425 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15427 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15432 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15433 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15434 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15436 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15441 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15442 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15443 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15445 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15450 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15451 int rb
= 29; /* SP */
15452 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15454 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15459 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15460 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15461 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15463 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15468 int rd
= uMIPS_RD5(ctx
->opcode
);
15469 int rs
= uMIPS_RS5(ctx
->opcode
);
15471 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15478 switch (ctx
->opcode
& 0x1) {
15488 switch (ctx
->opcode
& 0x1) {
15493 gen_addiur1sp(ctx
);
15497 case B16
: /* BC16 */
15498 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15499 sextract32(ctx
->opcode
, 0, 10) << 1,
15500 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15502 case BNEZ16
: /* BNEZC16 */
15503 case BEQZ16
: /* BEQZC16 */
15504 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15505 mmreg(uMIPS_RD(ctx
->opcode
)),
15506 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15507 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15512 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15513 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15515 imm
= (imm
== 0x7f ? -1 : imm
);
15516 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15522 generate_exception_end(ctx
, EXCP_RI
);
15525 decode_micromips32_opc(env
, ctx
);
15532 /* SmartMIPS extension to MIPS32 */
15534 #if defined(TARGET_MIPS64)
15536 /* MDMX extension to MIPS64 */
15540 /* MIPSDSP functions. */
15541 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15542 int rd
, int base
, int offset
)
15547 t0
= tcg_temp_new();
15550 gen_load_gpr(t0
, offset
);
15551 } else if (offset
== 0) {
15552 gen_load_gpr(t0
, base
);
15554 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15559 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15560 gen_store_gpr(t0
, rd
);
15563 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15564 gen_store_gpr(t0
, rd
);
15567 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15568 gen_store_gpr(t0
, rd
);
15570 #if defined(TARGET_MIPS64)
15572 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15573 gen_store_gpr(t0
, rd
);
15580 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15581 int ret
, int v1
, int v2
)
15587 /* Treat as NOP. */
15591 v1_t
= tcg_temp_new();
15592 v2_t
= tcg_temp_new();
15594 gen_load_gpr(v1_t
, v1
);
15595 gen_load_gpr(v2_t
, v2
);
15598 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15599 case OPC_MULT_G_2E
:
15603 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15605 case OPC_ADDUH_R_QB
:
15606 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15609 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15611 case OPC_ADDQH_R_PH
:
15612 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15615 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15617 case OPC_ADDQH_R_W
:
15618 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15621 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15623 case OPC_SUBUH_R_QB
:
15624 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15627 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15629 case OPC_SUBQH_R_PH
:
15630 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15633 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15635 case OPC_SUBQH_R_W
:
15636 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15640 case OPC_ABSQ_S_PH_DSP
:
15642 case OPC_ABSQ_S_QB
:
15644 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15646 case OPC_ABSQ_S_PH
:
15648 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15652 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15654 case OPC_PRECEQ_W_PHL
:
15656 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15657 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15659 case OPC_PRECEQ_W_PHR
:
15661 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15662 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15663 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15665 case OPC_PRECEQU_PH_QBL
:
15667 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15669 case OPC_PRECEQU_PH_QBR
:
15671 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15673 case OPC_PRECEQU_PH_QBLA
:
15675 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15677 case OPC_PRECEQU_PH_QBRA
:
15679 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15681 case OPC_PRECEU_PH_QBL
:
15683 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15685 case OPC_PRECEU_PH_QBR
:
15687 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15689 case OPC_PRECEU_PH_QBLA
:
15691 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15693 case OPC_PRECEU_PH_QBRA
:
15695 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15699 case OPC_ADDU_QB_DSP
:
15703 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15705 case OPC_ADDQ_S_PH
:
15707 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15711 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15715 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15717 case OPC_ADDU_S_QB
:
15719 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15723 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15725 case OPC_ADDU_S_PH
:
15727 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15731 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15733 case OPC_SUBQ_S_PH
:
15735 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15739 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15743 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15745 case OPC_SUBU_S_QB
:
15747 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15751 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15753 case OPC_SUBU_S_PH
:
15755 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15759 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15763 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15767 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15769 case OPC_RADDU_W_QB
:
15771 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15775 case OPC_CMPU_EQ_QB_DSP
:
15777 case OPC_PRECR_QB_PH
:
15779 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15781 case OPC_PRECRQ_QB_PH
:
15783 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15785 case OPC_PRECR_SRA_PH_W
:
15788 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15789 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15791 tcg_temp_free_i32(sa_t
);
15794 case OPC_PRECR_SRA_R_PH_W
:
15797 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15798 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15800 tcg_temp_free_i32(sa_t
);
15803 case OPC_PRECRQ_PH_W
:
15805 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15807 case OPC_PRECRQ_RS_PH_W
:
15809 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15811 case OPC_PRECRQU_S_QB_PH
:
15813 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15817 #ifdef TARGET_MIPS64
15818 case OPC_ABSQ_S_QH_DSP
:
15820 case OPC_PRECEQ_L_PWL
:
15822 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15824 case OPC_PRECEQ_L_PWR
:
15826 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15828 case OPC_PRECEQ_PW_QHL
:
15830 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15832 case OPC_PRECEQ_PW_QHR
:
15834 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15836 case OPC_PRECEQ_PW_QHLA
:
15838 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15840 case OPC_PRECEQ_PW_QHRA
:
15842 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15844 case OPC_PRECEQU_QH_OBL
:
15846 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15848 case OPC_PRECEQU_QH_OBR
:
15850 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15852 case OPC_PRECEQU_QH_OBLA
:
15854 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15856 case OPC_PRECEQU_QH_OBRA
:
15858 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15860 case OPC_PRECEU_QH_OBL
:
15862 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15864 case OPC_PRECEU_QH_OBR
:
15866 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15868 case OPC_PRECEU_QH_OBLA
:
15870 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15872 case OPC_PRECEU_QH_OBRA
:
15874 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15876 case OPC_ABSQ_S_OB
:
15878 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15880 case OPC_ABSQ_S_PW
:
15882 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15884 case OPC_ABSQ_S_QH
:
15886 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15890 case OPC_ADDU_OB_DSP
:
15892 case OPC_RADDU_L_OB
:
15894 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15898 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15900 case OPC_SUBQ_S_PW
:
15902 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15906 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15908 case OPC_SUBQ_S_QH
:
15910 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15914 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15916 case OPC_SUBU_S_OB
:
15918 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15922 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15924 case OPC_SUBU_S_QH
:
15926 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15930 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15932 case OPC_SUBUH_R_OB
:
15934 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15938 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15940 case OPC_ADDQ_S_PW
:
15942 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15946 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15948 case OPC_ADDQ_S_QH
:
15950 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15954 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15956 case OPC_ADDU_S_OB
:
15958 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15962 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15964 case OPC_ADDU_S_QH
:
15966 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15970 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15972 case OPC_ADDUH_R_OB
:
15974 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15978 case OPC_CMPU_EQ_OB_DSP
:
15980 case OPC_PRECR_OB_QH
:
15982 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15984 case OPC_PRECR_SRA_QH_PW
:
15987 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15988 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15989 tcg_temp_free_i32(ret_t
);
15992 case OPC_PRECR_SRA_R_QH_PW
:
15995 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15996 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15997 tcg_temp_free_i32(sa_v
);
16000 case OPC_PRECRQ_OB_QH
:
16002 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
16004 case OPC_PRECRQ_PW_L
:
16006 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
16008 case OPC_PRECRQ_QH_PW
:
16010 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16012 case OPC_PRECRQ_RS_QH_PW
:
16014 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16016 case OPC_PRECRQU_S_OB_QH
:
16018 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16025 tcg_temp_free(v1_t
);
16026 tcg_temp_free(v2_t
);
16029 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
16030 int ret
, int v1
, int v2
)
16038 /* Treat as NOP. */
16042 t0
= tcg_temp_new();
16043 v1_t
= tcg_temp_new();
16044 v2_t
= tcg_temp_new();
16046 tcg_gen_movi_tl(t0
, v1
);
16047 gen_load_gpr(v1_t
, v1
);
16048 gen_load_gpr(v2_t
, v2
);
16051 case OPC_SHLL_QB_DSP
:
16053 op2
= MASK_SHLL_QB(ctx
->opcode
);
16057 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16061 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16065 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16069 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16071 case OPC_SHLL_S_PH
:
16073 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16075 case OPC_SHLLV_S_PH
:
16077 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16081 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16083 case OPC_SHLLV_S_W
:
16085 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16089 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16093 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16097 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16101 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16105 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16107 case OPC_SHRA_R_QB
:
16109 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16113 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16115 case OPC_SHRAV_R_QB
:
16117 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16121 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16123 case OPC_SHRA_R_PH
:
16125 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16129 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16131 case OPC_SHRAV_R_PH
:
16133 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16137 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16139 case OPC_SHRAV_R_W
:
16141 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16143 default: /* Invalid */
16144 MIPS_INVAL("MASK SHLL.QB");
16145 generate_exception_end(ctx
, EXCP_RI
);
16150 #ifdef TARGET_MIPS64
16151 case OPC_SHLL_OB_DSP
:
16152 op2
= MASK_SHLL_OB(ctx
->opcode
);
16156 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16160 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16162 case OPC_SHLL_S_PW
:
16164 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16166 case OPC_SHLLV_S_PW
:
16168 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16172 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16176 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16180 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16184 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16186 case OPC_SHLL_S_QH
:
16188 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16190 case OPC_SHLLV_S_QH
:
16192 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16196 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16200 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16202 case OPC_SHRA_R_OB
:
16204 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16206 case OPC_SHRAV_R_OB
:
16208 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16212 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16216 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16218 case OPC_SHRA_R_PW
:
16220 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16222 case OPC_SHRAV_R_PW
:
16224 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16228 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16232 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16234 case OPC_SHRA_R_QH
:
16236 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16238 case OPC_SHRAV_R_QH
:
16240 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16244 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16248 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16252 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16256 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16258 default: /* Invalid */
16259 MIPS_INVAL("MASK SHLL.OB");
16260 generate_exception_end(ctx
, EXCP_RI
);
16268 tcg_temp_free(v1_t
);
16269 tcg_temp_free(v2_t
);
16272 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16273 int ret
, int v1
, int v2
, int check_ret
)
16279 if ((ret
== 0) && (check_ret
== 1)) {
16280 /* Treat as NOP. */
16284 t0
= tcg_temp_new_i32();
16285 v1_t
= tcg_temp_new();
16286 v2_t
= tcg_temp_new();
16288 tcg_gen_movi_i32(t0
, ret
);
16289 gen_load_gpr(v1_t
, v1
);
16290 gen_load_gpr(v2_t
, v2
);
16293 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16294 * the same mask and op1. */
16295 case OPC_MULT_G_2E
:
16299 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16302 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16305 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16307 case OPC_MULQ_RS_W
:
16308 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16312 case OPC_DPA_W_PH_DSP
:
16314 case OPC_DPAU_H_QBL
:
16316 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16318 case OPC_DPAU_H_QBR
:
16320 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16322 case OPC_DPSU_H_QBL
:
16324 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16326 case OPC_DPSU_H_QBR
:
16328 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16332 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16334 case OPC_DPAX_W_PH
:
16336 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16338 case OPC_DPAQ_S_W_PH
:
16340 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16342 case OPC_DPAQX_S_W_PH
:
16344 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16346 case OPC_DPAQX_SA_W_PH
:
16348 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16352 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16354 case OPC_DPSX_W_PH
:
16356 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16358 case OPC_DPSQ_S_W_PH
:
16360 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16362 case OPC_DPSQX_S_W_PH
:
16364 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16366 case OPC_DPSQX_SA_W_PH
:
16368 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16370 case OPC_MULSAQ_S_W_PH
:
16372 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16374 case OPC_DPAQ_SA_L_W
:
16376 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16378 case OPC_DPSQ_SA_L_W
:
16380 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16382 case OPC_MAQ_S_W_PHL
:
16384 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16386 case OPC_MAQ_S_W_PHR
:
16388 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16390 case OPC_MAQ_SA_W_PHL
:
16392 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16394 case OPC_MAQ_SA_W_PHR
:
16396 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16398 case OPC_MULSA_W_PH
:
16400 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16404 #ifdef TARGET_MIPS64
16405 case OPC_DPAQ_W_QH_DSP
:
16407 int ac
= ret
& 0x03;
16408 tcg_gen_movi_i32(t0
, ac
);
16413 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16417 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16421 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16425 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16429 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16431 case OPC_DPAQ_S_W_QH
:
16433 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16435 case OPC_DPAQ_SA_L_PW
:
16437 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16439 case OPC_DPAU_H_OBL
:
16441 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16443 case OPC_DPAU_H_OBR
:
16445 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16449 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16451 case OPC_DPSQ_S_W_QH
:
16453 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16455 case OPC_DPSQ_SA_L_PW
:
16457 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16459 case OPC_DPSU_H_OBL
:
16461 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16463 case OPC_DPSU_H_OBR
:
16465 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16467 case OPC_MAQ_S_L_PWL
:
16469 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16471 case OPC_MAQ_S_L_PWR
:
16473 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16475 case OPC_MAQ_S_W_QHLL
:
16477 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16479 case OPC_MAQ_SA_W_QHLL
:
16481 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16483 case OPC_MAQ_S_W_QHLR
:
16485 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16487 case OPC_MAQ_SA_W_QHLR
:
16489 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16491 case OPC_MAQ_S_W_QHRL
:
16493 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16495 case OPC_MAQ_SA_W_QHRL
:
16497 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16499 case OPC_MAQ_S_W_QHRR
:
16501 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16503 case OPC_MAQ_SA_W_QHRR
:
16505 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16507 case OPC_MULSAQ_S_L_PW
:
16509 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16511 case OPC_MULSAQ_S_W_QH
:
16513 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16519 case OPC_ADDU_QB_DSP
:
16521 case OPC_MULEU_S_PH_QBL
:
16523 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16525 case OPC_MULEU_S_PH_QBR
:
16527 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16529 case OPC_MULQ_RS_PH
:
16531 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16533 case OPC_MULEQ_S_W_PHL
:
16535 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16537 case OPC_MULEQ_S_W_PHR
:
16539 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16541 case OPC_MULQ_S_PH
:
16543 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16547 #ifdef TARGET_MIPS64
16548 case OPC_ADDU_OB_DSP
:
16550 case OPC_MULEQ_S_PW_QHL
:
16552 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16554 case OPC_MULEQ_S_PW_QHR
:
16556 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16558 case OPC_MULEU_S_QH_OBL
:
16560 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16562 case OPC_MULEU_S_QH_OBR
:
16564 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16566 case OPC_MULQ_RS_QH
:
16568 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16575 tcg_temp_free_i32(t0
);
16576 tcg_temp_free(v1_t
);
16577 tcg_temp_free(v2_t
);
16580 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16588 /* Treat as NOP. */
16592 t0
= tcg_temp_new();
16593 val_t
= tcg_temp_new();
16594 gen_load_gpr(val_t
, val
);
16597 case OPC_ABSQ_S_PH_DSP
:
16601 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16606 target_long result
;
16607 imm
= (ctx
->opcode
>> 16) & 0xFF;
16608 result
= (uint32_t)imm
<< 24 |
16609 (uint32_t)imm
<< 16 |
16610 (uint32_t)imm
<< 8 |
16612 result
= (int32_t)result
;
16613 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16618 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16619 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16620 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16621 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16622 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16623 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16628 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16629 imm
= (int16_t)(imm
<< 6) >> 6;
16630 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16631 (target_long
)((int32_t)imm
<< 16 | \
16637 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16638 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16639 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16640 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16644 #ifdef TARGET_MIPS64
16645 case OPC_ABSQ_S_QH_DSP
:
16652 imm
= (ctx
->opcode
>> 16) & 0xFF;
16653 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16654 temp
= (temp
<< 16) | temp
;
16655 temp
= (temp
<< 32) | temp
;
16656 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16664 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16665 imm
= (int16_t)(imm
<< 6) >> 6;
16666 temp
= ((target_long
)imm
<< 32) \
16667 | ((target_long
)imm
& 0xFFFFFFFF);
16668 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16676 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16677 imm
= (int16_t)(imm
<< 6) >> 6;
16679 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16680 ((uint64_t)(uint16_t)imm
<< 32) |
16681 ((uint64_t)(uint16_t)imm
<< 16) |
16682 (uint64_t)(uint16_t)imm
;
16683 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16688 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16689 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16690 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16691 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16692 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16693 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16694 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16698 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16699 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16700 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16704 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16705 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16706 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16707 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16708 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16715 tcg_temp_free(val_t
);
16718 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16719 uint32_t op1
, uint32_t op2
,
16720 int ret
, int v1
, int v2
, int check_ret
)
16726 if ((ret
== 0) && (check_ret
== 1)) {
16727 /* Treat as NOP. */
16731 t1
= tcg_temp_new();
16732 v1_t
= tcg_temp_new();
16733 v2_t
= tcg_temp_new();
16735 gen_load_gpr(v1_t
, v1
);
16736 gen_load_gpr(v2_t
, v2
);
16739 case OPC_CMPU_EQ_QB_DSP
:
16741 case OPC_CMPU_EQ_QB
:
16743 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16745 case OPC_CMPU_LT_QB
:
16747 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16749 case OPC_CMPU_LE_QB
:
16751 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16753 case OPC_CMPGU_EQ_QB
:
16755 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16757 case OPC_CMPGU_LT_QB
:
16759 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16761 case OPC_CMPGU_LE_QB
:
16763 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16765 case OPC_CMPGDU_EQ_QB
:
16767 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16768 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16769 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16770 tcg_gen_shli_tl(t1
, t1
, 24);
16771 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16773 case OPC_CMPGDU_LT_QB
:
16775 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16776 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16777 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16778 tcg_gen_shli_tl(t1
, t1
, 24);
16779 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16781 case OPC_CMPGDU_LE_QB
:
16783 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16784 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16785 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16786 tcg_gen_shli_tl(t1
, t1
, 24);
16787 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16789 case OPC_CMP_EQ_PH
:
16791 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16793 case OPC_CMP_LT_PH
:
16795 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16797 case OPC_CMP_LE_PH
:
16799 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16803 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16807 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16809 case OPC_PACKRL_PH
:
16811 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16815 #ifdef TARGET_MIPS64
16816 case OPC_CMPU_EQ_OB_DSP
:
16818 case OPC_CMP_EQ_PW
:
16820 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16822 case OPC_CMP_LT_PW
:
16824 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16826 case OPC_CMP_LE_PW
:
16828 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16830 case OPC_CMP_EQ_QH
:
16832 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16834 case OPC_CMP_LT_QH
:
16836 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16838 case OPC_CMP_LE_QH
:
16840 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16842 case OPC_CMPGDU_EQ_OB
:
16844 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16846 case OPC_CMPGDU_LT_OB
:
16848 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16850 case OPC_CMPGDU_LE_OB
:
16852 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16854 case OPC_CMPGU_EQ_OB
:
16856 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16858 case OPC_CMPGU_LT_OB
:
16860 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16862 case OPC_CMPGU_LE_OB
:
16864 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16866 case OPC_CMPU_EQ_OB
:
16868 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16870 case OPC_CMPU_LT_OB
:
16872 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16874 case OPC_CMPU_LE_OB
:
16876 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16878 case OPC_PACKRL_PW
:
16880 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16884 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16888 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16892 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16900 tcg_temp_free(v1_t
);
16901 tcg_temp_free(v2_t
);
16904 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16905 uint32_t op1
, int rt
, int rs
, int sa
)
16912 /* Treat as NOP. */
16916 t0
= tcg_temp_new();
16917 gen_load_gpr(t0
, rs
);
16920 case OPC_APPEND_DSP
:
16921 switch (MASK_APPEND(ctx
->opcode
)) {
16924 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16926 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16930 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16931 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16932 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16933 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16935 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16939 if (sa
!= 0 && sa
!= 2) {
16940 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16941 tcg_gen_ext32u_tl(t0
, t0
);
16942 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16943 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16945 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16947 default: /* Invalid */
16948 MIPS_INVAL("MASK APPEND");
16949 generate_exception_end(ctx
, EXCP_RI
);
16953 #ifdef TARGET_MIPS64
16954 case OPC_DAPPEND_DSP
:
16955 switch (MASK_DAPPEND(ctx
->opcode
)) {
16958 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16962 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16963 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16964 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16968 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16969 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16970 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16975 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16976 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16977 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16978 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16981 default: /* Invalid */
16982 MIPS_INVAL("MASK DAPPEND");
16983 generate_exception_end(ctx
, EXCP_RI
);
16992 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16993 int ret
, int v1
, int v2
, int check_ret
)
17002 if ((ret
== 0) && (check_ret
== 1)) {
17003 /* Treat as NOP. */
17007 t0
= tcg_temp_new();
17008 t1
= tcg_temp_new();
17009 v1_t
= tcg_temp_new();
17010 v2_t
= tcg_temp_new();
17012 gen_load_gpr(v1_t
, v1
);
17013 gen_load_gpr(v2_t
, v2
);
17016 case OPC_EXTR_W_DSP
:
17020 tcg_gen_movi_tl(t0
, v2
);
17021 tcg_gen_movi_tl(t1
, v1
);
17022 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17025 tcg_gen_movi_tl(t0
, v2
);
17026 tcg_gen_movi_tl(t1
, v1
);
17027 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17029 case OPC_EXTR_RS_W
:
17030 tcg_gen_movi_tl(t0
, v2
);
17031 tcg_gen_movi_tl(t1
, v1
);
17032 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17035 tcg_gen_movi_tl(t0
, v2
);
17036 tcg_gen_movi_tl(t1
, v1
);
17037 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17039 case OPC_EXTRV_S_H
:
17040 tcg_gen_movi_tl(t0
, v2
);
17041 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17044 tcg_gen_movi_tl(t0
, v2
);
17045 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17047 case OPC_EXTRV_R_W
:
17048 tcg_gen_movi_tl(t0
, v2
);
17049 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17051 case OPC_EXTRV_RS_W
:
17052 tcg_gen_movi_tl(t0
, v2
);
17053 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17056 tcg_gen_movi_tl(t0
, v2
);
17057 tcg_gen_movi_tl(t1
, v1
);
17058 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17061 tcg_gen_movi_tl(t0
, v2
);
17062 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17065 tcg_gen_movi_tl(t0
, v2
);
17066 tcg_gen_movi_tl(t1
, v1
);
17067 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17070 tcg_gen_movi_tl(t0
, v2
);
17071 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17074 imm
= (ctx
->opcode
>> 20) & 0x3F;
17075 tcg_gen_movi_tl(t0
, ret
);
17076 tcg_gen_movi_tl(t1
, imm
);
17077 gen_helper_shilo(t0
, t1
, cpu_env
);
17080 tcg_gen_movi_tl(t0
, ret
);
17081 gen_helper_shilo(t0
, v1_t
, cpu_env
);
17084 tcg_gen_movi_tl(t0
, ret
);
17085 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
17088 imm
= (ctx
->opcode
>> 11) & 0x3FF;
17089 tcg_gen_movi_tl(t0
, imm
);
17090 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
17093 imm
= (ctx
->opcode
>> 16) & 0x03FF;
17094 tcg_gen_movi_tl(t0
, imm
);
17095 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
17099 #ifdef TARGET_MIPS64
17100 case OPC_DEXTR_W_DSP
:
17104 tcg_gen_movi_tl(t0
, ret
);
17105 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
17109 int shift
= (ctx
->opcode
>> 19) & 0x7F;
17110 int ac
= (ctx
->opcode
>> 11) & 0x03;
17111 tcg_gen_movi_tl(t0
, shift
);
17112 tcg_gen_movi_tl(t1
, ac
);
17113 gen_helper_dshilo(t0
, t1
, cpu_env
);
17118 int ac
= (ctx
->opcode
>> 11) & 0x03;
17119 tcg_gen_movi_tl(t0
, ac
);
17120 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
17124 tcg_gen_movi_tl(t0
, v2
);
17125 tcg_gen_movi_tl(t1
, v1
);
17127 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17130 tcg_gen_movi_tl(t0
, v2
);
17131 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17134 tcg_gen_movi_tl(t0
, v2
);
17135 tcg_gen_movi_tl(t1
, v1
);
17136 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17139 tcg_gen_movi_tl(t0
, v2
);
17140 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17143 tcg_gen_movi_tl(t0
, v2
);
17144 tcg_gen_movi_tl(t1
, v1
);
17145 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17147 case OPC_DEXTR_R_L
:
17148 tcg_gen_movi_tl(t0
, v2
);
17149 tcg_gen_movi_tl(t1
, v1
);
17150 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17152 case OPC_DEXTR_RS_L
:
17153 tcg_gen_movi_tl(t0
, v2
);
17154 tcg_gen_movi_tl(t1
, v1
);
17155 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17158 tcg_gen_movi_tl(t0
, v2
);
17159 tcg_gen_movi_tl(t1
, v1
);
17160 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17162 case OPC_DEXTR_R_W
:
17163 tcg_gen_movi_tl(t0
, v2
);
17164 tcg_gen_movi_tl(t1
, v1
);
17165 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17167 case OPC_DEXTR_RS_W
:
17168 tcg_gen_movi_tl(t0
, v2
);
17169 tcg_gen_movi_tl(t1
, v1
);
17170 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17172 case OPC_DEXTR_S_H
:
17173 tcg_gen_movi_tl(t0
, v2
);
17174 tcg_gen_movi_tl(t1
, v1
);
17175 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17177 case OPC_DEXTRV_S_H
:
17178 tcg_gen_movi_tl(t0
, v2
);
17179 tcg_gen_movi_tl(t1
, v1
);
17180 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17183 tcg_gen_movi_tl(t0
, v2
);
17184 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17186 case OPC_DEXTRV_R_L
:
17187 tcg_gen_movi_tl(t0
, v2
);
17188 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17190 case OPC_DEXTRV_RS_L
:
17191 tcg_gen_movi_tl(t0
, v2
);
17192 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17195 tcg_gen_movi_tl(t0
, v2
);
17196 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17198 case OPC_DEXTRV_R_W
:
17199 tcg_gen_movi_tl(t0
, v2
);
17200 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17202 case OPC_DEXTRV_RS_W
:
17203 tcg_gen_movi_tl(t0
, v2
);
17204 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17213 tcg_temp_free(v1_t
);
17214 tcg_temp_free(v2_t
);
17217 /* End MIPSDSP functions. */
17219 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17221 int rs
, rt
, rd
, sa
;
17224 rs
= (ctx
->opcode
>> 21) & 0x1f;
17225 rt
= (ctx
->opcode
>> 16) & 0x1f;
17226 rd
= (ctx
->opcode
>> 11) & 0x1f;
17227 sa
= (ctx
->opcode
>> 6) & 0x1f;
17229 op1
= MASK_SPECIAL(ctx
->opcode
);
17232 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17234 case OPC_MULT
... OPC_DIVU
:
17235 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17245 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17248 MIPS_INVAL("special_r6 muldiv");
17249 generate_exception_end(ctx
, EXCP_RI
);
17255 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17259 if (rt
== 0 && sa
== 1) {
17260 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17261 We need additionally to check other fields */
17262 gen_cl(ctx
, op1
, rd
, rs
);
17264 generate_exception_end(ctx
, EXCP_RI
);
17268 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17269 gen_helper_do_semihosting(cpu_env
);
17271 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17272 generate_exception_end(ctx
, EXCP_RI
);
17274 generate_exception_end(ctx
, EXCP_DBp
);
17278 #if defined(TARGET_MIPS64)
17280 check_mips_64(ctx
);
17281 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17285 if (rt
== 0 && sa
== 1) {
17286 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17287 We need additionally to check other fields */
17288 check_mips_64(ctx
);
17289 gen_cl(ctx
, op1
, rd
, rs
);
17291 generate_exception_end(ctx
, EXCP_RI
);
17294 case OPC_DMULT
... OPC_DDIVU
:
17295 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17305 check_mips_64(ctx
);
17306 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17309 MIPS_INVAL("special_r6 muldiv");
17310 generate_exception_end(ctx
, EXCP_RI
);
17315 default: /* Invalid */
17316 MIPS_INVAL("special_r6");
17317 generate_exception_end(ctx
, EXCP_RI
);
17322 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17324 int rs
, rt
, rd
, sa
;
17327 rs
= (ctx
->opcode
>> 21) & 0x1f;
17328 rt
= (ctx
->opcode
>> 16) & 0x1f;
17329 rd
= (ctx
->opcode
>> 11) & 0x1f;
17330 sa
= (ctx
->opcode
>> 6) & 0x1f;
17332 op1
= MASK_SPECIAL(ctx
->opcode
);
17334 case OPC_MOVN
: /* Conditional move */
17336 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17337 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17338 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17340 case OPC_MFHI
: /* Move from HI/LO */
17342 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17345 case OPC_MTLO
: /* Move to HI/LO */
17346 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17349 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17350 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17351 check_cp1_enabled(ctx
);
17352 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17353 (ctx
->opcode
>> 16) & 1);
17355 generate_exception_err(ctx
, EXCP_CpU
, 1);
17361 check_insn(ctx
, INSN_VR54XX
);
17362 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17363 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17365 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17370 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17372 #if defined(TARGET_MIPS64)
17373 case OPC_DMULT
... OPC_DDIVU
:
17374 check_insn(ctx
, ISA_MIPS3
);
17375 check_mips_64(ctx
);
17376 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17380 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17383 #ifdef MIPS_STRICT_STANDARD
17384 MIPS_INVAL("SPIM");
17385 generate_exception_end(ctx
, EXCP_RI
);
17387 /* Implemented as RI exception for now. */
17388 MIPS_INVAL("spim (unofficial)");
17389 generate_exception_end(ctx
, EXCP_RI
);
17392 default: /* Invalid */
17393 MIPS_INVAL("special_legacy");
17394 generate_exception_end(ctx
, EXCP_RI
);
17399 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17401 int rs
, rt
, rd
, sa
;
17404 rs
= (ctx
->opcode
>> 21) & 0x1f;
17405 rt
= (ctx
->opcode
>> 16) & 0x1f;
17406 rd
= (ctx
->opcode
>> 11) & 0x1f;
17407 sa
= (ctx
->opcode
>> 6) & 0x1f;
17409 op1
= MASK_SPECIAL(ctx
->opcode
);
17411 case OPC_SLL
: /* Shift with immediate */
17412 if (sa
== 5 && rd
== 0 &&
17413 rs
== 0 && rt
== 0) { /* PAUSE */
17414 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17415 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17416 generate_exception_end(ctx
, EXCP_RI
);
17422 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17425 switch ((ctx
->opcode
>> 21) & 0x1f) {
17427 /* rotr is decoded as srl on non-R2 CPUs */
17428 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17433 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17436 generate_exception_end(ctx
, EXCP_RI
);
17440 case OPC_ADD
... OPC_SUBU
:
17441 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17443 case OPC_SLLV
: /* Shifts */
17445 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17448 switch ((ctx
->opcode
>> 6) & 0x1f) {
17450 /* rotrv is decoded as srlv on non-R2 CPUs */
17451 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17456 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17459 generate_exception_end(ctx
, EXCP_RI
);
17463 case OPC_SLT
: /* Set on less than */
17465 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17467 case OPC_AND
: /* Logic*/
17471 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17474 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17476 case OPC_TGE
... OPC_TEQ
: /* Traps */
17478 check_insn(ctx
, ISA_MIPS2
);
17479 gen_trap(ctx
, op1
, rs
, rt
, -1);
17481 case OPC_LSA
: /* OPC_PMON */
17482 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17483 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17484 decode_opc_special_r6(env
, ctx
);
17486 /* Pmon entry point, also R4010 selsl */
17487 #ifdef MIPS_STRICT_STANDARD
17488 MIPS_INVAL("PMON / selsl");
17489 generate_exception_end(ctx
, EXCP_RI
);
17491 gen_helper_0e0i(pmon
, sa
);
17496 generate_exception_end(ctx
, EXCP_SYSCALL
);
17499 generate_exception_end(ctx
, EXCP_BREAK
);
17502 check_insn(ctx
, ISA_MIPS2
);
17503 gen_sync(extract32(ctx
->opcode
, 6, 5));
17506 #if defined(TARGET_MIPS64)
17507 /* MIPS64 specific opcodes */
17512 check_insn(ctx
, ISA_MIPS3
);
17513 check_mips_64(ctx
);
17514 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17517 switch ((ctx
->opcode
>> 21) & 0x1f) {
17519 /* drotr is decoded as dsrl on non-R2 CPUs */
17520 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17525 check_insn(ctx
, ISA_MIPS3
);
17526 check_mips_64(ctx
);
17527 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17530 generate_exception_end(ctx
, EXCP_RI
);
17535 switch ((ctx
->opcode
>> 21) & 0x1f) {
17537 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17538 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17543 check_insn(ctx
, ISA_MIPS3
);
17544 check_mips_64(ctx
);
17545 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17548 generate_exception_end(ctx
, EXCP_RI
);
17552 case OPC_DADD
... OPC_DSUBU
:
17553 check_insn(ctx
, ISA_MIPS3
);
17554 check_mips_64(ctx
);
17555 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17559 check_insn(ctx
, ISA_MIPS3
);
17560 check_mips_64(ctx
);
17561 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17564 switch ((ctx
->opcode
>> 6) & 0x1f) {
17566 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17567 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17572 check_insn(ctx
, ISA_MIPS3
);
17573 check_mips_64(ctx
);
17574 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17577 generate_exception_end(ctx
, EXCP_RI
);
17582 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17583 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17584 decode_opc_special_r6(env
, ctx
);
17589 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17590 decode_opc_special_r6(env
, ctx
);
17592 decode_opc_special_legacy(env
, ctx
);
17597 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17602 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17604 rs
= (ctx
->opcode
>> 21) & 0x1f;
17605 rt
= (ctx
->opcode
>> 16) & 0x1f;
17606 rd
= (ctx
->opcode
>> 11) & 0x1f;
17608 op1
= MASK_SPECIAL2(ctx
->opcode
);
17610 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17611 case OPC_MSUB
... OPC_MSUBU
:
17612 check_insn(ctx
, ISA_MIPS32
);
17613 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17616 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17619 case OPC_DIVU_G_2F
:
17620 case OPC_MULT_G_2F
:
17621 case OPC_MULTU_G_2F
:
17623 case OPC_MODU_G_2F
:
17624 check_insn(ctx
, INSN_LOONGSON2F
);
17625 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17629 check_insn(ctx
, ISA_MIPS32
);
17630 gen_cl(ctx
, op1
, rd
, rs
);
17633 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17634 gen_helper_do_semihosting(cpu_env
);
17636 /* XXX: not clear which exception should be raised
17637 * when in debug mode...
17639 check_insn(ctx
, ISA_MIPS32
);
17640 generate_exception_end(ctx
, EXCP_DBp
);
17643 #if defined(TARGET_MIPS64)
17646 check_insn(ctx
, ISA_MIPS64
);
17647 check_mips_64(ctx
);
17648 gen_cl(ctx
, op1
, rd
, rs
);
17650 case OPC_DMULT_G_2F
:
17651 case OPC_DMULTU_G_2F
:
17652 case OPC_DDIV_G_2F
:
17653 case OPC_DDIVU_G_2F
:
17654 case OPC_DMOD_G_2F
:
17655 case OPC_DMODU_G_2F
:
17656 check_insn(ctx
, INSN_LOONGSON2F
);
17657 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17660 default: /* Invalid */
17661 MIPS_INVAL("special2_legacy");
17662 generate_exception_end(ctx
, EXCP_RI
);
17667 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17669 int rs
, rt
, rd
, sa
;
17673 rs
= (ctx
->opcode
>> 21) & 0x1f;
17674 rt
= (ctx
->opcode
>> 16) & 0x1f;
17675 rd
= (ctx
->opcode
>> 11) & 0x1f;
17676 sa
= (ctx
->opcode
>> 6) & 0x1f;
17677 imm
= (int16_t)ctx
->opcode
>> 7;
17679 op1
= MASK_SPECIAL3(ctx
->opcode
);
17683 /* hint codes 24-31 are reserved and signal RI */
17684 generate_exception_end(ctx
, EXCP_RI
);
17686 /* Treat as NOP. */
17689 check_cp0_enabled(ctx
);
17690 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17691 gen_cache_operation(ctx
, rt
, rs
, imm
);
17695 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17698 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17703 /* Treat as NOP. */
17706 op2
= MASK_BSHFL(ctx
->opcode
);
17708 case OPC_ALIGN
... OPC_ALIGN_END
:
17709 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17712 gen_bitswap(ctx
, op2
, rd
, rt
);
17717 #if defined(TARGET_MIPS64)
17719 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17722 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17725 check_mips_64(ctx
);
17728 /* Treat as NOP. */
17731 op2
= MASK_DBSHFL(ctx
->opcode
);
17733 case OPC_DALIGN
... OPC_DALIGN_END
:
17734 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17737 gen_bitswap(ctx
, op2
, rd
, rt
);
17744 default: /* Invalid */
17745 MIPS_INVAL("special3_r6");
17746 generate_exception_end(ctx
, EXCP_RI
);
17751 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17756 rs
= (ctx
->opcode
>> 21) & 0x1f;
17757 rt
= (ctx
->opcode
>> 16) & 0x1f;
17758 rd
= (ctx
->opcode
>> 11) & 0x1f;
17760 op1
= MASK_SPECIAL3(ctx
->opcode
);
17762 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17763 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17764 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17765 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17766 * the same mask and op1. */
17767 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17768 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17771 case OPC_ADDUH_R_QB
:
17773 case OPC_ADDQH_R_PH
:
17775 case OPC_ADDQH_R_W
:
17777 case OPC_SUBUH_R_QB
:
17779 case OPC_SUBQH_R_PH
:
17781 case OPC_SUBQH_R_W
:
17782 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17787 case OPC_MULQ_RS_W
:
17788 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17791 MIPS_INVAL("MASK ADDUH.QB");
17792 generate_exception_end(ctx
, EXCP_RI
);
17795 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17796 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17798 generate_exception_end(ctx
, EXCP_RI
);
17802 op2
= MASK_LX(ctx
->opcode
);
17804 #if defined(TARGET_MIPS64)
17810 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17812 default: /* Invalid */
17813 MIPS_INVAL("MASK LX");
17814 generate_exception_end(ctx
, EXCP_RI
);
17818 case OPC_ABSQ_S_PH_DSP
:
17819 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17821 case OPC_ABSQ_S_QB
:
17822 case OPC_ABSQ_S_PH
:
17824 case OPC_PRECEQ_W_PHL
:
17825 case OPC_PRECEQ_W_PHR
:
17826 case OPC_PRECEQU_PH_QBL
:
17827 case OPC_PRECEQU_PH_QBR
:
17828 case OPC_PRECEQU_PH_QBLA
:
17829 case OPC_PRECEQU_PH_QBRA
:
17830 case OPC_PRECEU_PH_QBL
:
17831 case OPC_PRECEU_PH_QBR
:
17832 case OPC_PRECEU_PH_QBLA
:
17833 case OPC_PRECEU_PH_QBRA
:
17834 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17841 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17844 MIPS_INVAL("MASK ABSQ_S.PH");
17845 generate_exception_end(ctx
, EXCP_RI
);
17849 case OPC_ADDU_QB_DSP
:
17850 op2
= MASK_ADDU_QB(ctx
->opcode
);
17853 case OPC_ADDQ_S_PH
:
17856 case OPC_ADDU_S_QB
:
17858 case OPC_ADDU_S_PH
:
17860 case OPC_SUBQ_S_PH
:
17863 case OPC_SUBU_S_QB
:
17865 case OPC_SUBU_S_PH
:
17869 case OPC_RADDU_W_QB
:
17870 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17872 case OPC_MULEU_S_PH_QBL
:
17873 case OPC_MULEU_S_PH_QBR
:
17874 case OPC_MULQ_RS_PH
:
17875 case OPC_MULEQ_S_W_PHL
:
17876 case OPC_MULEQ_S_W_PHR
:
17877 case OPC_MULQ_S_PH
:
17878 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17880 default: /* Invalid */
17881 MIPS_INVAL("MASK ADDU.QB");
17882 generate_exception_end(ctx
, EXCP_RI
);
17887 case OPC_CMPU_EQ_QB_DSP
:
17888 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17890 case OPC_PRECR_SRA_PH_W
:
17891 case OPC_PRECR_SRA_R_PH_W
:
17892 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17894 case OPC_PRECR_QB_PH
:
17895 case OPC_PRECRQ_QB_PH
:
17896 case OPC_PRECRQ_PH_W
:
17897 case OPC_PRECRQ_RS_PH_W
:
17898 case OPC_PRECRQU_S_QB_PH
:
17899 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17901 case OPC_CMPU_EQ_QB
:
17902 case OPC_CMPU_LT_QB
:
17903 case OPC_CMPU_LE_QB
:
17904 case OPC_CMP_EQ_PH
:
17905 case OPC_CMP_LT_PH
:
17906 case OPC_CMP_LE_PH
:
17907 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17909 case OPC_CMPGU_EQ_QB
:
17910 case OPC_CMPGU_LT_QB
:
17911 case OPC_CMPGU_LE_QB
:
17912 case OPC_CMPGDU_EQ_QB
:
17913 case OPC_CMPGDU_LT_QB
:
17914 case OPC_CMPGDU_LE_QB
:
17917 case OPC_PACKRL_PH
:
17918 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17920 default: /* Invalid */
17921 MIPS_INVAL("MASK CMPU.EQ.QB");
17922 generate_exception_end(ctx
, EXCP_RI
);
17926 case OPC_SHLL_QB_DSP
:
17927 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17929 case OPC_DPA_W_PH_DSP
:
17930 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17932 case OPC_DPAU_H_QBL
:
17933 case OPC_DPAU_H_QBR
:
17934 case OPC_DPSU_H_QBL
:
17935 case OPC_DPSU_H_QBR
:
17937 case OPC_DPAX_W_PH
:
17938 case OPC_DPAQ_S_W_PH
:
17939 case OPC_DPAQX_S_W_PH
:
17940 case OPC_DPAQX_SA_W_PH
:
17942 case OPC_DPSX_W_PH
:
17943 case OPC_DPSQ_S_W_PH
:
17944 case OPC_DPSQX_S_W_PH
:
17945 case OPC_DPSQX_SA_W_PH
:
17946 case OPC_MULSAQ_S_W_PH
:
17947 case OPC_DPAQ_SA_L_W
:
17948 case OPC_DPSQ_SA_L_W
:
17949 case OPC_MAQ_S_W_PHL
:
17950 case OPC_MAQ_S_W_PHR
:
17951 case OPC_MAQ_SA_W_PHL
:
17952 case OPC_MAQ_SA_W_PHR
:
17953 case OPC_MULSA_W_PH
:
17954 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17956 default: /* Invalid */
17957 MIPS_INVAL("MASK DPAW.PH");
17958 generate_exception_end(ctx
, EXCP_RI
);
17963 op2
= MASK_INSV(ctx
->opcode
);
17974 t0
= tcg_temp_new();
17975 t1
= tcg_temp_new();
17977 gen_load_gpr(t0
, rt
);
17978 gen_load_gpr(t1
, rs
);
17980 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17986 default: /* Invalid */
17987 MIPS_INVAL("MASK INSV");
17988 generate_exception_end(ctx
, EXCP_RI
);
17992 case OPC_APPEND_DSP
:
17993 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17995 case OPC_EXTR_W_DSP
:
17996 op2
= MASK_EXTR_W(ctx
->opcode
);
18000 case OPC_EXTR_RS_W
:
18002 case OPC_EXTRV_S_H
:
18004 case OPC_EXTRV_R_W
:
18005 case OPC_EXTRV_RS_W
:
18010 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18013 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18019 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18021 default: /* Invalid */
18022 MIPS_INVAL("MASK EXTR.W");
18023 generate_exception_end(ctx
, EXCP_RI
);
18027 #if defined(TARGET_MIPS64)
18028 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
18029 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
18030 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
18031 check_insn(ctx
, INSN_LOONGSON2E
);
18032 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
18034 case OPC_ABSQ_S_QH_DSP
:
18035 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
18037 case OPC_PRECEQ_L_PWL
:
18038 case OPC_PRECEQ_L_PWR
:
18039 case OPC_PRECEQ_PW_QHL
:
18040 case OPC_PRECEQ_PW_QHR
:
18041 case OPC_PRECEQ_PW_QHLA
:
18042 case OPC_PRECEQ_PW_QHRA
:
18043 case OPC_PRECEQU_QH_OBL
:
18044 case OPC_PRECEQU_QH_OBR
:
18045 case OPC_PRECEQU_QH_OBLA
:
18046 case OPC_PRECEQU_QH_OBRA
:
18047 case OPC_PRECEU_QH_OBL
:
18048 case OPC_PRECEU_QH_OBR
:
18049 case OPC_PRECEU_QH_OBLA
:
18050 case OPC_PRECEU_QH_OBRA
:
18051 case OPC_ABSQ_S_OB
:
18052 case OPC_ABSQ_S_PW
:
18053 case OPC_ABSQ_S_QH
:
18054 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18062 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18064 default: /* Invalid */
18065 MIPS_INVAL("MASK ABSQ_S.QH");
18066 generate_exception_end(ctx
, EXCP_RI
);
18070 case OPC_ADDU_OB_DSP
:
18071 op2
= MASK_ADDU_OB(ctx
->opcode
);
18073 case OPC_RADDU_L_OB
:
18075 case OPC_SUBQ_S_PW
:
18077 case OPC_SUBQ_S_QH
:
18079 case OPC_SUBU_S_OB
:
18081 case OPC_SUBU_S_QH
:
18083 case OPC_SUBUH_R_OB
:
18085 case OPC_ADDQ_S_PW
:
18087 case OPC_ADDQ_S_QH
:
18089 case OPC_ADDU_S_OB
:
18091 case OPC_ADDU_S_QH
:
18093 case OPC_ADDUH_R_OB
:
18094 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18096 case OPC_MULEQ_S_PW_QHL
:
18097 case OPC_MULEQ_S_PW_QHR
:
18098 case OPC_MULEU_S_QH_OBL
:
18099 case OPC_MULEU_S_QH_OBR
:
18100 case OPC_MULQ_RS_QH
:
18101 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18103 default: /* Invalid */
18104 MIPS_INVAL("MASK ADDU.OB");
18105 generate_exception_end(ctx
, EXCP_RI
);
18109 case OPC_CMPU_EQ_OB_DSP
:
18110 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
18112 case OPC_PRECR_SRA_QH_PW
:
18113 case OPC_PRECR_SRA_R_QH_PW
:
18114 /* Return value is rt. */
18115 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18117 case OPC_PRECR_OB_QH
:
18118 case OPC_PRECRQ_OB_QH
:
18119 case OPC_PRECRQ_PW_L
:
18120 case OPC_PRECRQ_QH_PW
:
18121 case OPC_PRECRQ_RS_QH_PW
:
18122 case OPC_PRECRQU_S_OB_QH
:
18123 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18125 case OPC_CMPU_EQ_OB
:
18126 case OPC_CMPU_LT_OB
:
18127 case OPC_CMPU_LE_OB
:
18128 case OPC_CMP_EQ_QH
:
18129 case OPC_CMP_LT_QH
:
18130 case OPC_CMP_LE_QH
:
18131 case OPC_CMP_EQ_PW
:
18132 case OPC_CMP_LT_PW
:
18133 case OPC_CMP_LE_PW
:
18134 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18136 case OPC_CMPGDU_EQ_OB
:
18137 case OPC_CMPGDU_LT_OB
:
18138 case OPC_CMPGDU_LE_OB
:
18139 case OPC_CMPGU_EQ_OB
:
18140 case OPC_CMPGU_LT_OB
:
18141 case OPC_CMPGU_LE_OB
:
18142 case OPC_PACKRL_PW
:
18146 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18148 default: /* Invalid */
18149 MIPS_INVAL("MASK CMPU_EQ.OB");
18150 generate_exception_end(ctx
, EXCP_RI
);
18154 case OPC_DAPPEND_DSP
:
18155 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18157 case OPC_DEXTR_W_DSP
:
18158 op2
= MASK_DEXTR_W(ctx
->opcode
);
18165 case OPC_DEXTR_R_L
:
18166 case OPC_DEXTR_RS_L
:
18168 case OPC_DEXTR_R_W
:
18169 case OPC_DEXTR_RS_W
:
18170 case OPC_DEXTR_S_H
:
18172 case OPC_DEXTRV_R_L
:
18173 case OPC_DEXTRV_RS_L
:
18174 case OPC_DEXTRV_S_H
:
18176 case OPC_DEXTRV_R_W
:
18177 case OPC_DEXTRV_RS_W
:
18178 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18183 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18185 default: /* Invalid */
18186 MIPS_INVAL("MASK EXTR.W");
18187 generate_exception_end(ctx
, EXCP_RI
);
18191 case OPC_DPAQ_W_QH_DSP
:
18192 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
18194 case OPC_DPAU_H_OBL
:
18195 case OPC_DPAU_H_OBR
:
18196 case OPC_DPSU_H_OBL
:
18197 case OPC_DPSU_H_OBR
:
18199 case OPC_DPAQ_S_W_QH
:
18201 case OPC_DPSQ_S_W_QH
:
18202 case OPC_MULSAQ_S_W_QH
:
18203 case OPC_DPAQ_SA_L_PW
:
18204 case OPC_DPSQ_SA_L_PW
:
18205 case OPC_MULSAQ_S_L_PW
:
18206 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18208 case OPC_MAQ_S_W_QHLL
:
18209 case OPC_MAQ_S_W_QHLR
:
18210 case OPC_MAQ_S_W_QHRL
:
18211 case OPC_MAQ_S_W_QHRR
:
18212 case OPC_MAQ_SA_W_QHLL
:
18213 case OPC_MAQ_SA_W_QHLR
:
18214 case OPC_MAQ_SA_W_QHRL
:
18215 case OPC_MAQ_SA_W_QHRR
:
18216 case OPC_MAQ_S_L_PWL
:
18217 case OPC_MAQ_S_L_PWR
:
18222 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18224 default: /* Invalid */
18225 MIPS_INVAL("MASK DPAQ.W.QH");
18226 generate_exception_end(ctx
, EXCP_RI
);
18230 case OPC_DINSV_DSP
:
18231 op2
= MASK_INSV(ctx
->opcode
);
18242 t0
= tcg_temp_new();
18243 t1
= tcg_temp_new();
18245 gen_load_gpr(t0
, rt
);
18246 gen_load_gpr(t1
, rs
);
18248 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18254 default: /* Invalid */
18255 MIPS_INVAL("MASK DINSV");
18256 generate_exception_end(ctx
, EXCP_RI
);
18260 case OPC_SHLL_OB_DSP
:
18261 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18264 default: /* Invalid */
18265 MIPS_INVAL("special3_legacy");
18266 generate_exception_end(ctx
, EXCP_RI
);
18271 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18273 int rs
, rt
, rd
, sa
;
18277 rs
= (ctx
->opcode
>> 21) & 0x1f;
18278 rt
= (ctx
->opcode
>> 16) & 0x1f;
18279 rd
= (ctx
->opcode
>> 11) & 0x1f;
18280 sa
= (ctx
->opcode
>> 6) & 0x1f;
18281 imm
= sextract32(ctx
->opcode
, 7, 9);
18283 op1
= MASK_SPECIAL3(ctx
->opcode
);
18286 * EVA loads and stores overlap Loongson 2E instructions decoded by
18287 * decode_opc_special3_legacy(), so be careful to allow their decoding when
18292 case OPC_LWLE
... OPC_LWRE
:
18293 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18295 case OPC_LBUE
... OPC_LHUE
:
18296 case OPC_LBE
... OPC_LWE
:
18297 check_cp0_enabled(ctx
);
18298 gen_ld(ctx
, op1
, rt
, rs
, imm
);
18300 case OPC_SWLE
... OPC_SWRE
:
18301 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18303 case OPC_SBE
... OPC_SHE
:
18305 check_cp0_enabled(ctx
);
18306 gen_st(ctx
, op1
, rt
, rs
, imm
);
18309 check_cp0_enabled(ctx
);
18310 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
18313 check_cp0_enabled(ctx
);
18314 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
18315 gen_cache_operation(ctx
, rt
, rs
, imm
);
18317 /* Treat as NOP. */
18320 check_cp0_enabled(ctx
);
18321 /* Treat as NOP. */
18329 check_insn(ctx
, ISA_MIPS32R2
);
18330 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18333 op2
= MASK_BSHFL(ctx
->opcode
);
18335 case OPC_ALIGN
... OPC_ALIGN_END
:
18337 check_insn(ctx
, ISA_MIPS32R6
);
18338 decode_opc_special3_r6(env
, ctx
);
18341 check_insn(ctx
, ISA_MIPS32R2
);
18342 gen_bshfl(ctx
, op2
, rt
, rd
);
18346 #if defined(TARGET_MIPS64)
18347 case OPC_DEXTM
... OPC_DEXT
:
18348 case OPC_DINSM
... OPC_DINS
:
18349 check_insn(ctx
, ISA_MIPS64R2
);
18350 check_mips_64(ctx
);
18351 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18354 op2
= MASK_DBSHFL(ctx
->opcode
);
18356 case OPC_DALIGN
... OPC_DALIGN_END
:
18358 check_insn(ctx
, ISA_MIPS32R6
);
18359 decode_opc_special3_r6(env
, ctx
);
18362 check_insn(ctx
, ISA_MIPS64R2
);
18363 check_mips_64(ctx
);
18364 op2
= MASK_DBSHFL(ctx
->opcode
);
18365 gen_bshfl(ctx
, op2
, rt
, rd
);
18371 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18374 check_insn(ctx
, ASE_MT
);
18376 TCGv t0
= tcg_temp_new();
18377 TCGv t1
= tcg_temp_new();
18379 gen_load_gpr(t0
, rt
);
18380 gen_load_gpr(t1
, rs
);
18381 gen_helper_fork(t0
, t1
);
18387 check_insn(ctx
, ASE_MT
);
18389 TCGv t0
= tcg_temp_new();
18391 gen_load_gpr(t0
, rs
);
18392 gen_helper_yield(t0
, cpu_env
, t0
);
18393 gen_store_gpr(t0
, rd
);
18398 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18399 decode_opc_special3_r6(env
, ctx
);
18401 decode_opc_special3_legacy(env
, ctx
);
18406 /* MIPS SIMD Architecture (MSA) */
18407 static inline int check_msa_access(DisasContext
*ctx
)
18409 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18410 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18411 generate_exception_end(ctx
, EXCP_RI
);
18415 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18416 if (ctx
->insn_flags
& ASE_MSA
) {
18417 generate_exception_end(ctx
, EXCP_MSADIS
);
18420 generate_exception_end(ctx
, EXCP_RI
);
18427 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18429 /* generates tcg ops to check if any element is 0 */
18430 /* Note this function only works with MSA_WRLEN = 128 */
18431 uint64_t eval_zero_or_big
= 0;
18432 uint64_t eval_big
= 0;
18433 TCGv_i64 t0
= tcg_temp_new_i64();
18434 TCGv_i64 t1
= tcg_temp_new_i64();
18437 eval_zero_or_big
= 0x0101010101010101ULL
;
18438 eval_big
= 0x8080808080808080ULL
;
18441 eval_zero_or_big
= 0x0001000100010001ULL
;
18442 eval_big
= 0x8000800080008000ULL
;
18445 eval_zero_or_big
= 0x0000000100000001ULL
;
18446 eval_big
= 0x8000000080000000ULL
;
18449 eval_zero_or_big
= 0x0000000000000001ULL
;
18450 eval_big
= 0x8000000000000000ULL
;
18453 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18454 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18455 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18456 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18457 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18458 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18459 tcg_gen_or_i64(t0
, t0
, t1
);
18460 /* if all bits are zero then all elements are not zero */
18461 /* if some bit is non-zero then some element is zero */
18462 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18463 tcg_gen_trunc_i64_tl(tresult
, t0
);
18464 tcg_temp_free_i64(t0
);
18465 tcg_temp_free_i64(t1
);
18468 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18470 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18471 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18472 int64_t s16
= (int16_t)ctx
->opcode
;
18474 check_msa_access(ctx
);
18476 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18477 generate_exception_end(ctx
, EXCP_RI
);
18484 TCGv_i64 t0
= tcg_temp_new_i64();
18485 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18486 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18487 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18488 tcg_gen_trunc_i64_tl(bcond
, t0
);
18489 tcg_temp_free_i64(t0
);
18496 gen_check_zero_element(bcond
, df
, wt
);
18502 gen_check_zero_element(bcond
, df
, wt
);
18503 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18507 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
18509 ctx
->hflags
|= MIPS_HFLAG_BC
;
18510 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18513 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18515 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18516 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18517 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18518 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18520 TCGv_i32 twd
= tcg_const_i32(wd
);
18521 TCGv_i32 tws
= tcg_const_i32(ws
);
18522 TCGv_i32 ti8
= tcg_const_i32(i8
);
18524 switch (MASK_MSA_I8(ctx
->opcode
)) {
18526 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18529 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18532 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18535 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18538 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18541 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18544 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18550 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18551 if (df
== DF_DOUBLE
) {
18552 generate_exception_end(ctx
, EXCP_RI
);
18554 TCGv_i32 tdf
= tcg_const_i32(df
);
18555 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18556 tcg_temp_free_i32(tdf
);
18561 MIPS_INVAL("MSA instruction");
18562 generate_exception_end(ctx
, EXCP_RI
);
18566 tcg_temp_free_i32(twd
);
18567 tcg_temp_free_i32(tws
);
18568 tcg_temp_free_i32(ti8
);
18571 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18573 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18574 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18575 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18576 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18577 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18578 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18580 TCGv_i32 tdf
= tcg_const_i32(df
);
18581 TCGv_i32 twd
= tcg_const_i32(wd
);
18582 TCGv_i32 tws
= tcg_const_i32(ws
);
18583 TCGv_i32 timm
= tcg_temp_new_i32();
18584 tcg_gen_movi_i32(timm
, u5
);
18586 switch (MASK_MSA_I5(ctx
->opcode
)) {
18588 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18591 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18593 case OPC_MAXI_S_df
:
18594 tcg_gen_movi_i32(timm
, s5
);
18595 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18597 case OPC_MAXI_U_df
:
18598 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18600 case OPC_MINI_S_df
:
18601 tcg_gen_movi_i32(timm
, s5
);
18602 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18604 case OPC_MINI_U_df
:
18605 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18608 tcg_gen_movi_i32(timm
, s5
);
18609 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18611 case OPC_CLTI_S_df
:
18612 tcg_gen_movi_i32(timm
, s5
);
18613 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18615 case OPC_CLTI_U_df
:
18616 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18618 case OPC_CLEI_S_df
:
18619 tcg_gen_movi_i32(timm
, s5
);
18620 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18622 case OPC_CLEI_U_df
:
18623 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18627 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18628 tcg_gen_movi_i32(timm
, s10
);
18629 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18633 MIPS_INVAL("MSA instruction");
18634 generate_exception_end(ctx
, EXCP_RI
);
18638 tcg_temp_free_i32(tdf
);
18639 tcg_temp_free_i32(twd
);
18640 tcg_temp_free_i32(tws
);
18641 tcg_temp_free_i32(timm
);
18644 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18646 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18647 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18648 uint32_t df
= 0, m
= 0;
18649 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18650 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18657 if ((dfm
& 0x40) == 0x00) {
18660 } else if ((dfm
& 0x60) == 0x40) {
18663 } else if ((dfm
& 0x70) == 0x60) {
18666 } else if ((dfm
& 0x78) == 0x70) {
18670 generate_exception_end(ctx
, EXCP_RI
);
18674 tdf
= tcg_const_i32(df
);
18675 tm
= tcg_const_i32(m
);
18676 twd
= tcg_const_i32(wd
);
18677 tws
= tcg_const_i32(ws
);
18679 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18681 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18684 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18687 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18690 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18693 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18696 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18698 case OPC_BINSLI_df
:
18699 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18701 case OPC_BINSRI_df
:
18702 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18705 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18708 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18711 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18714 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18717 MIPS_INVAL("MSA instruction");
18718 generate_exception_end(ctx
, EXCP_RI
);
18722 tcg_temp_free_i32(tdf
);
18723 tcg_temp_free_i32(tm
);
18724 tcg_temp_free_i32(twd
);
18725 tcg_temp_free_i32(tws
);
18728 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18730 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18731 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18732 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18733 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18734 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18736 TCGv_i32 tdf
= tcg_const_i32(df
);
18737 TCGv_i32 twd
= tcg_const_i32(wd
);
18738 TCGv_i32 tws
= tcg_const_i32(ws
);
18739 TCGv_i32 twt
= tcg_const_i32(wt
);
18741 switch (MASK_MSA_3R(ctx
->opcode
)) {
18743 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18746 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18749 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18752 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18754 case OPC_SUBS_S_df
:
18755 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18758 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18761 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18764 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18767 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18770 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18772 case OPC_ADDS_A_df
:
18773 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18775 case OPC_SUBS_U_df
:
18776 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18779 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18782 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18785 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18788 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18791 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18794 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18796 case OPC_ADDS_S_df
:
18797 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18799 case OPC_SUBSUS_U_df
:
18800 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18803 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18806 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18809 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18812 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18815 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18818 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18820 case OPC_ADDS_U_df
:
18821 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18823 case OPC_SUBSUU_S_df
:
18824 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18827 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18830 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18833 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18836 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18839 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18841 case OPC_ASUB_S_df
:
18842 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18845 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18848 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18851 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18854 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18857 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18860 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18862 case OPC_ASUB_U_df
:
18863 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18866 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18869 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18872 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18875 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18877 case OPC_AVER_S_df
:
18878 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18881 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18884 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18887 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18890 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18892 case OPC_AVER_U_df
:
18893 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18896 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18899 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18902 case OPC_DOTP_S_df
:
18903 case OPC_DOTP_U_df
:
18904 case OPC_DPADD_S_df
:
18905 case OPC_DPADD_U_df
:
18906 case OPC_DPSUB_S_df
:
18907 case OPC_HADD_S_df
:
18908 case OPC_DPSUB_U_df
:
18909 case OPC_HADD_U_df
:
18910 case OPC_HSUB_S_df
:
18911 case OPC_HSUB_U_df
:
18912 if (df
== DF_BYTE
) {
18913 generate_exception_end(ctx
, EXCP_RI
);
18916 switch (MASK_MSA_3R(ctx
->opcode
)) {
18917 case OPC_DOTP_S_df
:
18918 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18920 case OPC_DOTP_U_df
:
18921 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18923 case OPC_DPADD_S_df
:
18924 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18926 case OPC_DPADD_U_df
:
18927 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18929 case OPC_DPSUB_S_df
:
18930 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18932 case OPC_HADD_S_df
:
18933 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18935 case OPC_DPSUB_U_df
:
18936 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18938 case OPC_HADD_U_df
:
18939 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18941 case OPC_HSUB_S_df
:
18942 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18944 case OPC_HSUB_U_df
:
18945 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18950 MIPS_INVAL("MSA instruction");
18951 generate_exception_end(ctx
, EXCP_RI
);
18954 tcg_temp_free_i32(twd
);
18955 tcg_temp_free_i32(tws
);
18956 tcg_temp_free_i32(twt
);
18957 tcg_temp_free_i32(tdf
);
18960 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18962 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18963 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18964 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18965 TCGv telm
= tcg_temp_new();
18966 TCGv_i32 tsr
= tcg_const_i32(source
);
18967 TCGv_i32 tdt
= tcg_const_i32(dest
);
18969 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18971 gen_load_gpr(telm
, source
);
18972 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18975 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18976 gen_store_gpr(telm
, dest
);
18979 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18982 MIPS_INVAL("MSA instruction");
18983 generate_exception_end(ctx
, EXCP_RI
);
18987 tcg_temp_free(telm
);
18988 tcg_temp_free_i32(tdt
);
18989 tcg_temp_free_i32(tsr
);
18992 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18995 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18996 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18997 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18999 TCGv_i32 tws
= tcg_const_i32(ws
);
19000 TCGv_i32 twd
= tcg_const_i32(wd
);
19001 TCGv_i32 tn
= tcg_const_i32(n
);
19002 TCGv_i32 tdf
= tcg_const_i32(df
);
19004 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19006 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
19008 case OPC_SPLATI_df
:
19009 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
19012 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
19014 case OPC_COPY_S_df
:
19015 case OPC_COPY_U_df
:
19016 case OPC_INSERT_df
:
19017 #if !defined(TARGET_MIPS64)
19018 /* Double format valid only for MIPS64 */
19019 if (df
== DF_DOUBLE
) {
19020 generate_exception_end(ctx
, EXCP_RI
);
19024 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19025 case OPC_COPY_S_df
:
19026 if (likely(wd
!= 0)) {
19027 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
19030 case OPC_COPY_U_df
:
19031 if (likely(wd
!= 0)) {
19032 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
19035 case OPC_INSERT_df
:
19036 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
19041 MIPS_INVAL("MSA instruction");
19042 generate_exception_end(ctx
, EXCP_RI
);
19044 tcg_temp_free_i32(twd
);
19045 tcg_temp_free_i32(tws
);
19046 tcg_temp_free_i32(tn
);
19047 tcg_temp_free_i32(tdf
);
19050 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
19052 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
19053 uint32_t df
= 0, n
= 0;
19055 if ((dfn
& 0x30) == 0x00) {
19058 } else if ((dfn
& 0x38) == 0x20) {
19061 } else if ((dfn
& 0x3c) == 0x30) {
19064 } else if ((dfn
& 0x3e) == 0x38) {
19067 } else if (dfn
== 0x3E) {
19068 /* CTCMSA, CFCMSA, MOVE.V */
19069 gen_msa_elm_3e(env
, ctx
);
19072 generate_exception_end(ctx
, EXCP_RI
);
19076 gen_msa_elm_df(env
, ctx
, df
, n
);
19079 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19081 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19082 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
19083 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19084 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19085 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19087 TCGv_i32 twd
= tcg_const_i32(wd
);
19088 TCGv_i32 tws
= tcg_const_i32(ws
);
19089 TCGv_i32 twt
= tcg_const_i32(wt
);
19090 TCGv_i32 tdf
= tcg_temp_new_i32();
19092 /* adjust df value for floating-point instruction */
19093 tcg_gen_movi_i32(tdf
, df
+ 2);
19095 switch (MASK_MSA_3RF(ctx
->opcode
)) {
19097 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19100 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19103 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19106 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19109 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19112 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19115 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
19118 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19121 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19124 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
19127 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19130 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19133 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19136 tcg_gen_movi_i32(tdf
, df
+ 1);
19137 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19140 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19143 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19145 case OPC_MADD_Q_df
:
19146 tcg_gen_movi_i32(tdf
, df
+ 1);
19147 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19150 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19152 case OPC_MSUB_Q_df
:
19153 tcg_gen_movi_i32(tdf
, df
+ 1);
19154 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19157 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19160 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
19163 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19166 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
19169 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19172 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19175 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19178 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19181 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19184 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19187 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19190 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19193 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
19195 case OPC_MULR_Q_df
:
19196 tcg_gen_movi_i32(tdf
, df
+ 1);
19197 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19200 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19202 case OPC_FMIN_A_df
:
19203 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19205 case OPC_MADDR_Q_df
:
19206 tcg_gen_movi_i32(tdf
, df
+ 1);
19207 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19210 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19213 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
19215 case OPC_MSUBR_Q_df
:
19216 tcg_gen_movi_i32(tdf
, df
+ 1);
19217 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19220 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19222 case OPC_FMAX_A_df
:
19223 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19226 MIPS_INVAL("MSA instruction");
19227 generate_exception_end(ctx
, EXCP_RI
);
19231 tcg_temp_free_i32(twd
);
19232 tcg_temp_free_i32(tws
);
19233 tcg_temp_free_i32(twt
);
19234 tcg_temp_free_i32(tdf
);
19237 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
19239 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19240 (op & (0x7 << 18)))
19241 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19242 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19243 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19244 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
19245 TCGv_i32 twd
= tcg_const_i32(wd
);
19246 TCGv_i32 tws
= tcg_const_i32(ws
);
19247 TCGv_i32 twt
= tcg_const_i32(wt
);
19248 TCGv_i32 tdf
= tcg_const_i32(df
);
19250 switch (MASK_MSA_2R(ctx
->opcode
)) {
19252 #if !defined(TARGET_MIPS64)
19253 /* Double format valid only for MIPS64 */
19254 if (df
== DF_DOUBLE
) {
19255 generate_exception_end(ctx
, EXCP_RI
);
19259 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
19262 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
19265 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
19268 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
19271 MIPS_INVAL("MSA instruction");
19272 generate_exception_end(ctx
, EXCP_RI
);
19276 tcg_temp_free_i32(twd
);
19277 tcg_temp_free_i32(tws
);
19278 tcg_temp_free_i32(twt
);
19279 tcg_temp_free_i32(tdf
);
19282 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19284 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19285 (op & (0xf << 17)))
19286 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19287 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19288 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19289 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
19290 TCGv_i32 twd
= tcg_const_i32(wd
);
19291 TCGv_i32 tws
= tcg_const_i32(ws
);
19292 TCGv_i32 twt
= tcg_const_i32(wt
);
19293 /* adjust df value for floating-point instruction */
19294 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
19296 switch (MASK_MSA_2RF(ctx
->opcode
)) {
19297 case OPC_FCLASS_df
:
19298 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
19300 case OPC_FTRUNC_S_df
:
19301 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
19303 case OPC_FTRUNC_U_df
:
19304 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
19307 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19309 case OPC_FRSQRT_df
:
19310 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19313 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19316 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19319 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19321 case OPC_FEXUPL_df
:
19322 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19324 case OPC_FEXUPR_df
:
19325 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19328 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19331 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19333 case OPC_FTINT_S_df
:
19334 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19336 case OPC_FTINT_U_df
:
19337 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19339 case OPC_FFINT_S_df
:
19340 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19342 case OPC_FFINT_U_df
:
19343 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19347 tcg_temp_free_i32(twd
);
19348 tcg_temp_free_i32(tws
);
19349 tcg_temp_free_i32(twt
);
19350 tcg_temp_free_i32(tdf
);
19353 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19355 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19356 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19357 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19358 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19359 TCGv_i32 twd
= tcg_const_i32(wd
);
19360 TCGv_i32 tws
= tcg_const_i32(ws
);
19361 TCGv_i32 twt
= tcg_const_i32(wt
);
19363 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19365 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19368 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19371 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19374 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19377 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19380 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19383 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19386 MIPS_INVAL("MSA instruction");
19387 generate_exception_end(ctx
, EXCP_RI
);
19391 tcg_temp_free_i32(twd
);
19392 tcg_temp_free_i32(tws
);
19393 tcg_temp_free_i32(twt
);
19396 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19398 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19406 gen_msa_vec_v(env
, ctx
);
19409 gen_msa_2r(env
, ctx
);
19412 gen_msa_2rf(env
, ctx
);
19415 MIPS_INVAL("MSA instruction");
19416 generate_exception_end(ctx
, EXCP_RI
);
19421 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19423 uint32_t opcode
= ctx
->opcode
;
19424 check_insn(ctx
, ASE_MSA
);
19425 check_msa_access(ctx
);
19427 switch (MASK_MSA_MINOR(opcode
)) {
19428 case OPC_MSA_I8_00
:
19429 case OPC_MSA_I8_01
:
19430 case OPC_MSA_I8_02
:
19431 gen_msa_i8(env
, ctx
);
19433 case OPC_MSA_I5_06
:
19434 case OPC_MSA_I5_07
:
19435 gen_msa_i5(env
, ctx
);
19437 case OPC_MSA_BIT_09
:
19438 case OPC_MSA_BIT_0A
:
19439 gen_msa_bit(env
, ctx
);
19441 case OPC_MSA_3R_0D
:
19442 case OPC_MSA_3R_0E
:
19443 case OPC_MSA_3R_0F
:
19444 case OPC_MSA_3R_10
:
19445 case OPC_MSA_3R_11
:
19446 case OPC_MSA_3R_12
:
19447 case OPC_MSA_3R_13
:
19448 case OPC_MSA_3R_14
:
19449 case OPC_MSA_3R_15
:
19450 gen_msa_3r(env
, ctx
);
19453 gen_msa_elm(env
, ctx
);
19455 case OPC_MSA_3RF_1A
:
19456 case OPC_MSA_3RF_1B
:
19457 case OPC_MSA_3RF_1C
:
19458 gen_msa_3rf(env
, ctx
);
19461 gen_msa_vec(env
, ctx
);
19472 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19473 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19474 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19475 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19477 TCGv_i32 twd
= tcg_const_i32(wd
);
19478 TCGv taddr
= tcg_temp_new();
19479 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19481 switch (MASK_MSA_MINOR(opcode
)) {
19483 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19486 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19489 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19492 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19495 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19498 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19501 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19504 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19508 tcg_temp_free_i32(twd
);
19509 tcg_temp_free(taddr
);
19513 MIPS_INVAL("MSA instruction");
19514 generate_exception_end(ctx
, EXCP_RI
);
19520 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19523 int rs
, rt
, rd
, sa
;
19527 /* make sure instructions are on a word boundary */
19528 if (ctx
->base
.pc_next
& 0x3) {
19529 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
19530 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19534 /* Handle blikely not taken case */
19535 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19536 TCGLabel
*l1
= gen_new_label();
19538 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19539 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19540 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
19544 op
= MASK_OP_MAJOR(ctx
->opcode
);
19545 rs
= (ctx
->opcode
>> 21) & 0x1f;
19546 rt
= (ctx
->opcode
>> 16) & 0x1f;
19547 rd
= (ctx
->opcode
>> 11) & 0x1f;
19548 sa
= (ctx
->opcode
>> 6) & 0x1f;
19549 imm
= (int16_t)ctx
->opcode
;
19552 decode_opc_special(env
, ctx
);
19555 decode_opc_special2_legacy(env
, ctx
);
19558 decode_opc_special3(env
, ctx
);
19561 op1
= MASK_REGIMM(ctx
->opcode
);
19563 case OPC_BLTZL
: /* REGIMM branches */
19567 check_insn(ctx
, ISA_MIPS2
);
19568 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19572 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19576 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19578 /* OPC_NAL, OPC_BAL */
19579 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19581 generate_exception_end(ctx
, EXCP_RI
);
19584 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19587 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19589 check_insn(ctx
, ISA_MIPS2
);
19590 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19591 gen_trap(ctx
, op1
, rs
, -1, imm
);
19594 check_insn(ctx
, ISA_MIPS32R6
);
19595 generate_exception_end(ctx
, EXCP_RI
);
19598 check_insn(ctx
, ISA_MIPS32R2
);
19599 /* Break the TB to be able to sync copied instructions
19601 ctx
->base
.is_jmp
= DISAS_STOP
;
19603 case OPC_BPOSGE32
: /* MIPS DSP branch */
19604 #if defined(TARGET_MIPS64)
19608 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19610 #if defined(TARGET_MIPS64)
19612 check_insn(ctx
, ISA_MIPS32R6
);
19613 check_mips_64(ctx
);
19615 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19619 check_insn(ctx
, ISA_MIPS32R6
);
19620 check_mips_64(ctx
);
19622 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19626 default: /* Invalid */
19627 MIPS_INVAL("regimm");
19628 generate_exception_end(ctx
, EXCP_RI
);
19633 check_cp0_enabled(ctx
);
19634 op1
= MASK_CP0(ctx
->opcode
);
19642 #if defined(TARGET_MIPS64)
19646 #ifndef CONFIG_USER_ONLY
19647 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19648 #endif /* !CONFIG_USER_ONLY */
19650 case OPC_C0_FIRST
... OPC_C0_LAST
:
19651 #ifndef CONFIG_USER_ONLY
19652 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19653 #endif /* !CONFIG_USER_ONLY */
19656 #ifndef CONFIG_USER_ONLY
19659 TCGv t0
= tcg_temp_new();
19661 op2
= MASK_MFMC0(ctx
->opcode
);
19664 check_insn(ctx
, ASE_MT
);
19665 gen_helper_dmt(t0
);
19666 gen_store_gpr(t0
, rt
);
19669 check_insn(ctx
, ASE_MT
);
19670 gen_helper_emt(t0
);
19671 gen_store_gpr(t0
, rt
);
19674 check_insn(ctx
, ASE_MT
);
19675 gen_helper_dvpe(t0
, cpu_env
);
19676 gen_store_gpr(t0
, rt
);
19679 check_insn(ctx
, ASE_MT
);
19680 gen_helper_evpe(t0
, cpu_env
);
19681 gen_store_gpr(t0
, rt
);
19684 check_insn(ctx
, ISA_MIPS32R6
);
19686 gen_helper_dvp(t0
, cpu_env
);
19687 gen_store_gpr(t0
, rt
);
19691 check_insn(ctx
, ISA_MIPS32R6
);
19693 gen_helper_evp(t0
, cpu_env
);
19694 gen_store_gpr(t0
, rt
);
19698 check_insn(ctx
, ISA_MIPS32R2
);
19699 save_cpu_state(ctx
, 1);
19700 gen_helper_di(t0
, cpu_env
);
19701 gen_store_gpr(t0
, rt
);
19702 /* Stop translation as we may have switched
19703 the execution mode. */
19704 ctx
->base
.is_jmp
= DISAS_STOP
;
19707 check_insn(ctx
, ISA_MIPS32R2
);
19708 save_cpu_state(ctx
, 1);
19709 gen_helper_ei(t0
, cpu_env
);
19710 gen_store_gpr(t0
, rt
);
19711 /* DISAS_STOP isn't sufficient, we need to ensure we break
19712 out of translated code to check for pending interrupts */
19713 gen_save_pc(ctx
->base
.pc_next
+ 4);
19714 ctx
->base
.is_jmp
= DISAS_EXIT
;
19716 default: /* Invalid */
19717 MIPS_INVAL("mfmc0");
19718 generate_exception_end(ctx
, EXCP_RI
);
19723 #endif /* !CONFIG_USER_ONLY */
19726 check_insn(ctx
, ISA_MIPS32R2
);
19727 gen_load_srsgpr(rt
, rd
);
19730 check_insn(ctx
, ISA_MIPS32R2
);
19731 gen_store_srsgpr(rt
, rd
);
19735 generate_exception_end(ctx
, EXCP_RI
);
19739 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19740 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19741 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19742 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19745 /* Arithmetic with immediate opcode */
19746 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19750 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19752 case OPC_SLTI
: /* Set on less than with immediate opcode */
19754 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19756 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19757 case OPC_LUI
: /* OPC_AUI */
19760 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19762 case OPC_J
... OPC_JAL
: /* Jump */
19763 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19764 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19767 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19768 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19770 generate_exception_end(ctx
, EXCP_RI
);
19773 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19774 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19777 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19780 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19781 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19783 generate_exception_end(ctx
, EXCP_RI
);
19786 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19787 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19790 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19793 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19796 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19798 check_insn(ctx
, ISA_MIPS32R6
);
19799 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19800 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19803 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19806 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19808 check_insn(ctx
, ISA_MIPS32R6
);
19809 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19810 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19815 check_insn(ctx
, ISA_MIPS2
);
19816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19820 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19822 case OPC_LL
: /* Load and stores */
19823 check_insn(ctx
, ISA_MIPS2
);
19827 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19829 case OPC_LB
... OPC_LH
:
19830 case OPC_LW
... OPC_LHU
:
19831 gen_ld(ctx
, op
, rt
, rs
, imm
);
19835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19837 case OPC_SB
... OPC_SH
:
19839 gen_st(ctx
, op
, rt
, rs
, imm
);
19842 check_insn(ctx
, ISA_MIPS2
);
19843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19844 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19848 check_cp0_enabled(ctx
);
19849 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19850 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19851 gen_cache_operation(ctx
, rt
, rs
, imm
);
19853 /* Treat as NOP. */
19856 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19857 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19858 /* Treat as NOP. */
19861 /* Floating point (COP1). */
19866 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19870 op1
= MASK_CP1(ctx
->opcode
);
19875 check_cp1_enabled(ctx
);
19876 check_insn(ctx
, ISA_MIPS32R2
);
19881 check_cp1_enabled(ctx
);
19882 gen_cp1(ctx
, op1
, rt
, rd
);
19884 #if defined(TARGET_MIPS64)
19887 check_cp1_enabled(ctx
);
19888 check_insn(ctx
, ISA_MIPS3
);
19889 check_mips_64(ctx
);
19890 gen_cp1(ctx
, op1
, rt
, rd
);
19893 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19894 check_cp1_enabled(ctx
);
19895 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19897 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19902 check_insn(ctx
, ASE_MIPS3D
);
19903 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19904 (rt
>> 2) & 0x7, imm
<< 2);
19908 check_cp1_enabled(ctx
);
19909 check_insn(ctx
, ISA_MIPS32R6
);
19910 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19914 check_cp1_enabled(ctx
);
19915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19917 check_insn(ctx
, ASE_MIPS3D
);
19920 check_cp1_enabled(ctx
);
19921 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19922 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19923 (rt
>> 2) & 0x7, imm
<< 2);
19930 check_cp1_enabled(ctx
);
19931 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19937 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19938 check_cp1_enabled(ctx
);
19939 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19941 case R6_OPC_CMP_AF_S
:
19942 case R6_OPC_CMP_UN_S
:
19943 case R6_OPC_CMP_EQ_S
:
19944 case R6_OPC_CMP_UEQ_S
:
19945 case R6_OPC_CMP_LT_S
:
19946 case R6_OPC_CMP_ULT_S
:
19947 case R6_OPC_CMP_LE_S
:
19948 case R6_OPC_CMP_ULE_S
:
19949 case R6_OPC_CMP_SAF_S
:
19950 case R6_OPC_CMP_SUN_S
:
19951 case R6_OPC_CMP_SEQ_S
:
19952 case R6_OPC_CMP_SEUQ_S
:
19953 case R6_OPC_CMP_SLT_S
:
19954 case R6_OPC_CMP_SULT_S
:
19955 case R6_OPC_CMP_SLE_S
:
19956 case R6_OPC_CMP_SULE_S
:
19957 case R6_OPC_CMP_OR_S
:
19958 case R6_OPC_CMP_UNE_S
:
19959 case R6_OPC_CMP_NE_S
:
19960 case R6_OPC_CMP_SOR_S
:
19961 case R6_OPC_CMP_SUNE_S
:
19962 case R6_OPC_CMP_SNE_S
:
19963 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19965 case R6_OPC_CMP_AF_D
:
19966 case R6_OPC_CMP_UN_D
:
19967 case R6_OPC_CMP_EQ_D
:
19968 case R6_OPC_CMP_UEQ_D
:
19969 case R6_OPC_CMP_LT_D
:
19970 case R6_OPC_CMP_ULT_D
:
19971 case R6_OPC_CMP_LE_D
:
19972 case R6_OPC_CMP_ULE_D
:
19973 case R6_OPC_CMP_SAF_D
:
19974 case R6_OPC_CMP_SUN_D
:
19975 case R6_OPC_CMP_SEQ_D
:
19976 case R6_OPC_CMP_SEUQ_D
:
19977 case R6_OPC_CMP_SLT_D
:
19978 case R6_OPC_CMP_SULT_D
:
19979 case R6_OPC_CMP_SLE_D
:
19980 case R6_OPC_CMP_SULE_D
:
19981 case R6_OPC_CMP_OR_D
:
19982 case R6_OPC_CMP_UNE_D
:
19983 case R6_OPC_CMP_NE_D
:
19984 case R6_OPC_CMP_SOR_D
:
19985 case R6_OPC_CMP_SUNE_D
:
19986 case R6_OPC_CMP_SNE_D
:
19987 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19990 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19991 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19996 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
20011 check_insn(ctx
, ASE_MSA
);
20012 gen_msa_branch(env
, ctx
, op1
);
20016 generate_exception_end(ctx
, EXCP_RI
);
20021 /* Compact branches [R6] and COP2 [non-R6] */
20022 case OPC_BC
: /* OPC_LWC2 */
20023 case OPC_BALC
: /* OPC_SWC2 */
20024 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20025 /* OPC_BC, OPC_BALC */
20026 gen_compute_compact_branch(ctx
, op
, 0, 0,
20027 sextract32(ctx
->opcode
<< 2, 0, 28));
20029 /* OPC_LWC2, OPC_SWC2 */
20030 /* COP2: Not implemented. */
20031 generate_exception_err(ctx
, EXCP_CpU
, 2);
20034 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
20035 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
20036 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20038 /* OPC_BEQZC, OPC_BNEZC */
20039 gen_compute_compact_branch(ctx
, op
, rs
, 0,
20040 sextract32(ctx
->opcode
<< 2, 0, 23));
20042 /* OPC_JIC, OPC_JIALC */
20043 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
20046 /* OPC_LWC2, OPC_SWC2 */
20047 /* COP2: Not implemented. */
20048 generate_exception_err(ctx
, EXCP_CpU
, 2);
20052 check_insn(ctx
, INSN_LOONGSON2F
);
20053 /* Note that these instructions use different fields. */
20054 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
20058 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20059 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20060 check_cp1_enabled(ctx
);
20061 op1
= MASK_CP3(ctx
->opcode
);
20065 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20071 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20072 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
20075 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20076 /* Treat as NOP. */
20079 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20093 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20094 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
20098 generate_exception_end(ctx
, EXCP_RI
);
20102 generate_exception_err(ctx
, EXCP_CpU
, 1);
20106 #if defined(TARGET_MIPS64)
20107 /* MIPS64 opcodes */
20108 case OPC_LDL
... OPC_LDR
:
20110 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20114 check_insn(ctx
, ISA_MIPS3
);
20115 check_mips_64(ctx
);
20116 gen_ld(ctx
, op
, rt
, rs
, imm
);
20118 case OPC_SDL
... OPC_SDR
:
20119 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20122 check_insn(ctx
, ISA_MIPS3
);
20123 check_mips_64(ctx
);
20124 gen_st(ctx
, op
, rt
, rs
, imm
);
20127 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20128 check_insn(ctx
, ISA_MIPS3
);
20129 check_mips_64(ctx
);
20130 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
20132 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
20133 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20134 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
20135 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20138 check_insn(ctx
, ISA_MIPS3
);
20139 check_mips_64(ctx
);
20140 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20144 check_insn(ctx
, ISA_MIPS3
);
20145 check_mips_64(ctx
);
20146 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20149 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
20150 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20151 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20153 MIPS_INVAL("major opcode");
20154 generate_exception_end(ctx
, EXCP_RI
);
20158 case OPC_DAUI
: /* OPC_JALX */
20159 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20160 #if defined(TARGET_MIPS64)
20162 check_mips_64(ctx
);
20164 generate_exception(ctx
, EXCP_RI
);
20165 } else if (rt
!= 0) {
20166 TCGv t0
= tcg_temp_new();
20167 gen_load_gpr(t0
, rs
);
20168 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
20172 generate_exception_end(ctx
, EXCP_RI
);
20173 MIPS_INVAL("major opcode");
20177 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
20178 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
20179 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
20182 case OPC_MSA
: /* OPC_MDMX */
20183 /* MDMX: Not implemented. */
20187 check_insn(ctx
, ISA_MIPS32R6
);
20188 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
20190 default: /* Invalid */
20191 MIPS_INVAL("major opcode");
20192 generate_exception_end(ctx
, EXCP_RI
);
20197 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
20199 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20200 CPUMIPSState
*env
= cs
->env_ptr
;
20202 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
20203 ctx
->saved_pc
= -1;
20204 ctx
->insn_flags
= env
->insn_flags
;
20205 ctx
->CP0_Config1
= env
->CP0_Config1
;
20207 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
20208 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
20209 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
20210 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
20211 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
20212 ctx
->PAMask
= env
->PAMask
;
20213 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
20214 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
20215 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
20216 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
20217 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
20218 /* Restore delay slot state from the tb context. */
20219 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
20220 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
20221 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
20222 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
20223 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
20224 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
20225 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
20226 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
20227 restore_cpu_state(env
, ctx
);
20228 #ifdef CONFIG_USER_ONLY
20229 ctx
->mem_idx
= MIPS_HFLAG_UM
;
20231 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
20233 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
20234 MO_UNALN
: MO_ALIGN
;
20236 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
20240 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
20244 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
20246 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20248 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
20252 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
20253 const CPUBreakpoint
*bp
)
20255 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20257 save_cpu_state(ctx
, 1);
20258 ctx
->base
.is_jmp
= DISAS_NORETURN
;
20259 gen_helper_raise_exception_debug(cpu_env
);
20260 /* The address covered by the breakpoint must be included in
20261 [tb->pc, tb->pc + tb->size) in order to for it to be
20262 properly cleared -- thus we increment the PC here so that
20263 the logic setting tb->size below does the right thing. */
20264 ctx
->base
.pc_next
+= 4;
20268 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
20270 CPUMIPSState
*env
= cs
->env_ptr
;
20271 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20275 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
20276 if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
20277 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
20279 decode_opc(env
, ctx
);
20280 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
20281 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
20282 insn_bytes
= decode_micromips_opc(env
, ctx
);
20283 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
20284 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
20285 insn_bytes
= decode_mips16_opc(env
, ctx
);
20287 generate_exception_end(ctx
, EXCP_RI
);
20288 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
20292 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
20293 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
20294 MIPS_HFLAG_FBNSLOT
))) {
20295 /* force to generate branch as there is neither delay nor
20299 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
20300 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
20301 /* Force to generate branch as microMIPS R6 doesn't restrict
20302 branches in the forbidden slot. */
20307 gen_branch(ctx
, insn_bytes
);
20309 ctx
->base
.pc_next
+= insn_bytes
;
20311 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
20314 /* Execute a branch and its delay slot as a single instruction.
20315 This is what GDB expects and is consistent with what the
20316 hardware does (e.g. if a delay slot instruction faults, the
20317 reported PC is the PC of the branch). */
20318 if (ctx
->base
.singlestep_enabled
&&
20319 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
20320 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
20322 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
20323 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
20327 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
20329 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
20331 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
20332 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
20333 gen_helper_raise_exception_debug(cpu_env
);
20335 switch (ctx
->base
.is_jmp
) {
20337 gen_save_pc(ctx
->base
.pc_next
);
20338 tcg_gen_lookup_and_goto_ptr();
20341 case DISAS_TOO_MANY
:
20342 save_cpu_state(ctx
, 0);
20343 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
20346 tcg_gen_exit_tb(NULL
, 0);
20348 case DISAS_NORETURN
:
20351 g_assert_not_reached();
20356 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
20358 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
20359 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
20362 static const TranslatorOps mips_tr_ops
= {
20363 .init_disas_context
= mips_tr_init_disas_context
,
20364 .tb_start
= mips_tr_tb_start
,
20365 .insn_start
= mips_tr_insn_start
,
20366 .breakpoint_check
= mips_tr_breakpoint_check
,
20367 .translate_insn
= mips_tr_translate_insn
,
20368 .tb_stop
= mips_tr_tb_stop
,
20369 .disas_log
= mips_tr_disas_log
,
20372 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
20376 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
20379 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
20383 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
20385 #define printfpr(fp) \
20388 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20389 " fd:%13g fs:%13g psu: %13g\n", \
20390 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
20391 (double)(fp)->fd, \
20392 (double)(fp)->fs[FP_ENDIAN_IDX], \
20393 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
20396 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
20397 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
20398 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20399 " fd:%13g fs:%13g psu:%13g\n", \
20400 tmp.w[FP_ENDIAN_IDX], tmp.d, \
20402 (double)tmp.fs[FP_ENDIAN_IDX], \
20403 (double)tmp.fs[!FP_ENDIAN_IDX]); \
20408 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
20409 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
20410 get_float_exception_flags(&env
->active_fpu
.fp_status
));
20411 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
20412 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
20413 printfpr(&env
->active_fpu
.fpr
[i
]);
20419 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
20422 MIPSCPU
*cpu
= MIPS_CPU(cs
);
20423 CPUMIPSState
*env
= &cpu
->env
;
20426 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
20427 " LO=0x" TARGET_FMT_lx
" ds %04x "
20428 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
20429 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
20430 env
->hflags
, env
->btarget
, env
->bcond
);
20431 for (i
= 0; i
< 32; i
++) {
20433 cpu_fprintf(f
, "GPR%02d:", i
);
20434 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
20436 cpu_fprintf(f
, "\n");
20439 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
20440 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
20441 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
20443 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
20444 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
20445 env
->CP0_Config2
, env
->CP0_Config3
);
20446 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
20447 env
->CP0_Config4
, env
->CP0_Config5
);
20448 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
20449 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
20453 void mips_tcg_init(void)
20458 for (i
= 1; i
< 32; i
++)
20459 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
20460 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
20463 for (i
= 0; i
< 32; i
++) {
20464 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
20466 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
20467 /* The scalar floating-point unit (FPU) registers are mapped on
20468 * the MSA vector registers. */
20469 fpu_f64
[i
] = msa_wr_d
[i
* 2];
20470 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
20471 msa_wr_d
[i
* 2 + 1] =
20472 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
20475 cpu_PC
= tcg_global_mem_new(cpu_env
,
20476 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
20477 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
20478 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
20479 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
20481 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
20482 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
20485 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
20486 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
20488 bcond
= tcg_global_mem_new(cpu_env
,
20489 offsetof(CPUMIPSState
, bcond
), "bcond");
20490 btarget
= tcg_global_mem_new(cpu_env
,
20491 offsetof(CPUMIPSState
, btarget
), "btarget");
20492 hflags
= tcg_global_mem_new_i32(cpu_env
,
20493 offsetof(CPUMIPSState
, hflags
), "hflags");
20495 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
20496 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
20498 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
20499 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
20503 #include "translate_init.inc.c"
20505 void cpu_mips_realize_env(CPUMIPSState
*env
)
20507 env
->exception_base
= (int32_t)0xBFC00000;
20509 #ifndef CONFIG_USER_ONLY
20510 mmu_init(env
, env
->cpu_model
);
20512 fpu_init(env
, env
->cpu_model
);
20513 mvp_init(env
, env
->cpu_model
);
20516 bool cpu_supports_cps_smp(const char *cpu_type
)
20518 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20519 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
20522 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
20524 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
20525 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
20528 void cpu_set_exception_base(int vp_index
, target_ulong address
)
20530 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20531 vp
->env
.exception_base
= address
;
20534 void cpu_state_reset(CPUMIPSState
*env
)
20536 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
20537 CPUState
*cs
= CPU(cpu
);
20539 /* Reset registers to their default values */
20540 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
20541 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
20542 #ifdef TARGET_WORDS_BIGENDIAN
20543 env
->CP0_Config0
|= (1 << CP0C0_BE
);
20545 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
20546 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
20547 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
20548 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
20549 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
20550 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
20551 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
20552 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
20553 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20554 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20555 << env
->cpu_model
->CP0_LLAddr_shift
;
20556 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20557 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20558 env
->CCRes
= env
->cpu_model
->CCRes
;
20559 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20560 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20561 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20562 env
->current_tc
= 0;
20563 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20564 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20565 #if defined(TARGET_MIPS64)
20566 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20567 env
->SEGMask
|= 3ULL << 62;
20570 env
->PABITS
= env
->cpu_model
->PABITS
;
20571 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20572 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20573 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20574 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20575 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20576 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20577 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20578 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20579 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20580 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20581 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20582 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20583 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
20584 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20585 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
20586 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20587 env
->msair
= env
->cpu_model
->MSAIR
;
20588 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20590 #if defined(CONFIG_USER_ONLY)
20591 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20592 # ifdef TARGET_MIPS64
20593 /* Enable 64-bit register mode. */
20594 env
->CP0_Status
|= (1 << CP0St_PX
);
20596 # ifdef TARGET_ABI_MIPSN64
20597 /* Enable 64-bit address mode. */
20598 env
->CP0_Status
|= (1 << CP0St_UX
);
20600 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20601 hardware registers. */
20602 env
->CP0_HWREna
|= 0x0000000F;
20603 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20604 env
->CP0_Status
|= (1 << CP0St_CU1
);
20606 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20607 env
->CP0_Status
|= (1 << CP0St_MX
);
20609 # if defined(TARGET_MIPS64)
20610 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20611 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20612 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20613 env
->CP0_Status
|= (1 << CP0St_FR
);
20617 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20618 /* If the exception was raised from a delay slot,
20619 come back to the jump. */
20620 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20621 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20623 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20625 env
->active_tc
.PC
= env
->exception_base
;
20626 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20627 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20628 env
->CP0_Wired
= 0;
20629 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20630 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20631 if (mips_um_ksegs_enabled()) {
20632 env
->CP0_EBase
|= 0x40000000;
20634 env
->CP0_EBase
|= (int32_t)0x80000000;
20636 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20637 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20639 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
20641 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20642 /* vectored interrupts not implemented, timer on int 7,
20643 no performance counters. */
20644 env
->CP0_IntCtl
= 0xe0000000;
20648 for (i
= 0; i
< 7; i
++) {
20649 env
->CP0_WatchLo
[i
] = 0;
20650 env
->CP0_WatchHi
[i
] = 0x80000000;
20652 env
->CP0_WatchLo
[7] = 0;
20653 env
->CP0_WatchHi
[7] = 0;
20655 /* Count register increments in debug mode, EJTAG version 1 */
20656 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20658 cpu_mips_store_count(env
, 1);
20660 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20663 /* Only TC0 on VPE 0 starts as active. */
20664 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20665 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20666 env
->tcs
[i
].CP0_TCHalt
= 1;
20668 env
->active_tc
.CP0_TCHalt
= 1;
20671 if (cs
->cpu_index
== 0) {
20672 /* VPE0 starts up enabled. */
20673 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20674 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20676 /* TC0 starts up unhalted. */
20678 env
->active_tc
.CP0_TCHalt
= 0;
20679 env
->tcs
[0].CP0_TCHalt
= 0;
20680 /* With thread 0 active. */
20681 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20682 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20687 * Configure default legacy segmentation control. We use this regardless of
20688 * whether segmentation control is presented to the guest.
20690 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
20691 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
20692 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
20693 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
20694 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
20695 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20697 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
20698 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20699 (3 << CP0SC_C
)) << 16;
20700 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
20701 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20702 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
20703 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
20704 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20705 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
20706 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
20707 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
20709 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20710 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20711 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20712 env
->CP0_Status
|= (1 << CP0St_FR
);
20715 if (env
->CP0_Config3
& (1 << CP0C3_ISA
)) {
20716 /* microMIPS on reset when Config3.ISA == {1, 3} */
20717 env
->hflags
|= MIPS_HFLAG_M16
;
20721 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20725 compute_hflags(env
);
20726 restore_fp_status(env
);
20727 restore_pamask(env
);
20728 cs
->exception_index
= EXCP_NONE
;
20730 if (semihosting_get_argc()) {
20731 /* UHI interface can be used to obtain argc and argv */
20732 env
->active_tc
.gpr
[4] = -1;
20736 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20737 target_ulong
*data
)
20739 env
->active_tc
.PC
= data
[0];
20740 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20741 env
->hflags
|= data
[1];
20742 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20743 case MIPS_HFLAG_BR
:
20745 case MIPS_HFLAG_BC
:
20746 case MIPS_HFLAG_BL
:
20748 env
->btarget
= data
[2];