/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
26 #include "disas/disas.h"
28 #include "exec/cpu_ldst.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "sysemu/kvm.h"
33 #include "exec/semihost.h"
35 #include "trace-tcg.h"
37 #define MIPS_DEBUG_DISAS 0
39 /* MIPS major opcodes */
40 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
43 /* indirect opcode tables */
44 OPC_SPECIAL
= (0x00 << 26),
45 OPC_REGIMM
= (0x01 << 26),
46 OPC_CP0
= (0x10 << 26),
47 OPC_CP1
= (0x11 << 26),
48 OPC_CP2
= (0x12 << 26),
49 OPC_CP3
= (0x13 << 26),
50 OPC_SPECIAL2
= (0x1C << 26),
51 OPC_SPECIAL3
= (0x1F << 26),
52 /* arithmetic with immediate */
53 OPC_ADDI
= (0x08 << 26),
54 OPC_ADDIU
= (0x09 << 26),
55 OPC_SLTI
= (0x0A << 26),
56 OPC_SLTIU
= (0x0B << 26),
57 /* logic with immediate */
58 OPC_ANDI
= (0x0C << 26),
59 OPC_ORI
= (0x0D << 26),
60 OPC_XORI
= (0x0E << 26),
61 OPC_LUI
= (0x0F << 26),
62 /* arithmetic with immediate */
63 OPC_DADDI
= (0x18 << 26),
64 OPC_DADDIU
= (0x19 << 26),
65 /* Jump and branches */
67 OPC_JAL
= (0x03 << 26),
68 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
69 OPC_BEQL
= (0x14 << 26),
70 OPC_BNE
= (0x05 << 26),
71 OPC_BNEL
= (0x15 << 26),
72 OPC_BLEZ
= (0x06 << 26),
73 OPC_BLEZL
= (0x16 << 26),
74 OPC_BGTZ
= (0x07 << 26),
75 OPC_BGTZL
= (0x17 << 26),
76 OPC_JALX
= (0x1D << 26),
77 OPC_DAUI
= (0x1D << 26),
79 OPC_LDL
= (0x1A << 26),
80 OPC_LDR
= (0x1B << 26),
81 OPC_LB
= (0x20 << 26),
82 OPC_LH
= (0x21 << 26),
83 OPC_LWL
= (0x22 << 26),
84 OPC_LW
= (0x23 << 26),
85 OPC_LWPC
= OPC_LW
| 0x5,
86 OPC_LBU
= (0x24 << 26),
87 OPC_LHU
= (0x25 << 26),
88 OPC_LWR
= (0x26 << 26),
89 OPC_LWU
= (0x27 << 26),
90 OPC_SB
= (0x28 << 26),
91 OPC_SH
= (0x29 << 26),
92 OPC_SWL
= (0x2A << 26),
93 OPC_SW
= (0x2B << 26),
94 OPC_SDL
= (0x2C << 26),
95 OPC_SDR
= (0x2D << 26),
96 OPC_SWR
= (0x2E << 26),
97 OPC_LL
= (0x30 << 26),
98 OPC_LLD
= (0x34 << 26),
99 OPC_LD
= (0x37 << 26),
100 OPC_LDPC
= OPC_LD
| 0x5,
101 OPC_SC
= (0x38 << 26),
102 OPC_SCD
= (0x3C << 26),
103 OPC_SD
= (0x3F << 26),
104 /* Floating point load/store */
105 OPC_LWC1
= (0x31 << 26),
106 OPC_LWC2
= (0x32 << 26),
107 OPC_LDC1
= (0x35 << 26),
108 OPC_LDC2
= (0x36 << 26),
109 OPC_SWC1
= (0x39 << 26),
110 OPC_SWC2
= (0x3A << 26),
111 OPC_SDC1
= (0x3D << 26),
112 OPC_SDC2
= (0x3E << 26),
113 /* Compact Branches */
114 OPC_BLEZALC
= (0x06 << 26),
115 OPC_BGEZALC
= (0x06 << 26),
116 OPC_BGEUC
= (0x06 << 26),
117 OPC_BGTZALC
= (0x07 << 26),
118 OPC_BLTZALC
= (0x07 << 26),
119 OPC_BLTUC
= (0x07 << 26),
120 OPC_BOVC
= (0x08 << 26),
121 OPC_BEQZALC
= (0x08 << 26),
122 OPC_BEQC
= (0x08 << 26),
123 OPC_BLEZC
= (0x16 << 26),
124 OPC_BGEZC
= (0x16 << 26),
125 OPC_BGEC
= (0x16 << 26),
126 OPC_BGTZC
= (0x17 << 26),
127 OPC_BLTZC
= (0x17 << 26),
128 OPC_BLTC
= (0x17 << 26),
129 OPC_BNVC
= (0x18 << 26),
130 OPC_BNEZALC
= (0x18 << 26),
131 OPC_BNEC
= (0x18 << 26),
132 OPC_BC
= (0x32 << 26),
133 OPC_BEQZC
= (0x36 << 26),
134 OPC_JIC
= (0x36 << 26),
135 OPC_BALC
= (0x3A << 26),
136 OPC_BNEZC
= (0x3E << 26),
137 OPC_JIALC
= (0x3E << 26),
138 /* MDMX ASE specific */
139 OPC_MDMX
= (0x1E << 26),
140 /* MSA ASE, same as MDMX */
142 /* Cache and prefetch */
143 OPC_CACHE
= (0x2F << 26),
144 OPC_PREF
= (0x33 << 26),
145 /* PC-relative address computation / loads */
146 OPC_PCREL
= (0x3B << 26),
149 /* PC-relative address computation / loads */
150 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
151 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
153 /* Instructions determined by bits 19 and 20 */
154 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
155 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
156 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
158 /* Instructions determined by bits 16 ... 20 */
159 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
160 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
163 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
166 /* MIPS special opcodes */
/* Major opcode plus SPECIAL function field (bits 5..0).  Fully
 * parenthesized so the expansion composes safely in any expression,
 * matching MASK_R6_MULDIV/MASK_LX in this file. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
171 OPC_SLL
= 0x00 | OPC_SPECIAL
,
172 /* NOP is SLL r0, r0, 0 */
173 /* SSNOP is SLL r0, r0, 1 */
174 /* EHB is SLL r0, r0, 3 */
175 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
176 OPC_ROTR
= OPC_SRL
| (1 << 21),
177 OPC_SRA
= 0x03 | OPC_SPECIAL
,
178 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
179 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
180 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
181 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
182 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
183 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
184 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
185 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
186 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
187 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
188 OPC_DROTR
= OPC_DSRL
| (1 << 21),
189 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
190 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
191 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
192 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
193 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
194 /* Multiplication / division */
195 OPC_MULT
= 0x18 | OPC_SPECIAL
,
196 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
197 OPC_DIV
= 0x1A | OPC_SPECIAL
,
198 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
199 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
200 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
201 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
202 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
204 /* 2 registers arithmetic / logic */
205 OPC_ADD
= 0x20 | OPC_SPECIAL
,
206 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
207 OPC_SUB
= 0x22 | OPC_SPECIAL
,
208 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
209 OPC_AND
= 0x24 | OPC_SPECIAL
,
210 OPC_OR
= 0x25 | OPC_SPECIAL
,
211 OPC_XOR
= 0x26 | OPC_SPECIAL
,
212 OPC_NOR
= 0x27 | OPC_SPECIAL
,
213 OPC_SLT
= 0x2A | OPC_SPECIAL
,
214 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
215 OPC_DADD
= 0x2C | OPC_SPECIAL
,
216 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
217 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
218 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
220 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
221 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
223 OPC_TGE
= 0x30 | OPC_SPECIAL
,
224 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
225 OPC_TLT
= 0x32 | OPC_SPECIAL
,
226 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
227 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
228 OPC_TNE
= 0x36 | OPC_SPECIAL
,
229 /* HI / LO registers load & stores */
230 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
231 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
232 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
233 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
234 /* Conditional moves */
235 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
236 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
238 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
239 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
241 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
244 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
245 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
246 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
247 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
248 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
250 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
251 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
252 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
253 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
256 /* R6 Multiply and Divide instructions have the same Opcode
257 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
258 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
261 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
262 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
263 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
264 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
265 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
266 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
267 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
268 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
270 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
271 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
272 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
273 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
274 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
275 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
276 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
277 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
279 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
280 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
281 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
282 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
283 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
285 OPC_LSA
= 0x05 | OPC_SPECIAL
,
286 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
289 /* Multiplication variants of the vr54xx. */
/* SPECIAL-class opcode plus the 5-bit sa field (bits 10..6) selecting the
 * VR54xx multiply variant.  Fully parenthesized for safe composition. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | (op & (0x1F << 6)))
293 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
294 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
295 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
296 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
297 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
298 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
299 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
300 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
301 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
302 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
303 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
304 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
305 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
306 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
309 /* REGIMM (rt field) opcodes */
/* Major opcode plus the REGIMM rt field (bits 20..16).  Fully
 * parenthesized so the expansion composes safely in any expression. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
313 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
314 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
315 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
316 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
317 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
318 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
319 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
320 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
321 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
322 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
323 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
324 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
325 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
326 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
327 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
328 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
330 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
331 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
334 /* Special2 opcodes */
/* Major opcode plus SPECIAL2 function field (bits 5..0).  Fully
 * parenthesized so the expansion composes safely in any expression. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
338 /* Multiply & xxx operations */
339 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
340 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
341 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
342 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
343 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
345 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
346 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
347 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
348 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
349 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
350 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
351 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
352 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
353 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
354 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
355 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
356 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
358 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
359 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
360 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
361 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
363 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
366 /* Special3 opcodes */
/* Major opcode plus SPECIAL3 function field (bits 5..0).  Fully
 * parenthesized so the expansion composes safely in any expression. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
370 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
371 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
372 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
373 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
374 OPC_INS
= 0x04 | OPC_SPECIAL3
,
375 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
376 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
377 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
378 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
379 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
380 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
381 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
382 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
385 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
386 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
387 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
388 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
389 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
390 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
391 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
392 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
393 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
394 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
395 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
396 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
399 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
400 /* MIPS DSP Arithmetic */
401 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
402 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
403 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
404 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
405 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
406 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
407 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
408 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
409 /* MIPS DSP GPR-Based Shift Sub-class */
410 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
411 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
412 /* MIPS DSP Multiply Sub-class insns */
413 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
414 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
415 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
416 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
417 /* DSP Bit/Manipulation Sub-class */
418 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
419 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
420 /* MIPS DSP Append Sub-class */
421 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
422 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
423 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
424 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
425 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
428 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
429 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
430 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
431 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
432 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
433 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* SPECIAL3-class opcode plus the BSHFL sa field (bits 10..6).  Fully
 * parenthesized so the expansion composes safely in any expression. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
440 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
441 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
442 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
443 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
444 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
445 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* SPECIAL3-class opcode plus the DBSHFL sa field (bits 10..6).  Fully
 * parenthesized so the expansion composes safely in any expression. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
452 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
453 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
454 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
455 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
456 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
459 /* MIPS DSP REGIMM opcodes */
461 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
462 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
465 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
468 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
469 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
470 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
471 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
474 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
476 /* MIPS DSP Arithmetic Sub-class */
477 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
478 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
484 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
491 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
492 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
494 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
495 /* MIPS DSP Multiply Sub-class insns */
496 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
497 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
504 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
505 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
509 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
520 /* MIPS DSP Multiply Sub-class insns */
521 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
522 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
523 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
524 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
527 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
529 /* MIPS DSP Arithmetic Sub-class */
530 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
531 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
543 /* DSP Bit/Manipulation Sub-class */
544 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
545 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
547 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
551 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
553 /* MIPS DSP Arithmetic Sub-class */
554 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
555 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
561 /* DSP Compare-Pick Sub-class */
562 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
563 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
579 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
581 /* MIPS DSP GPR-Based Shift Sub-class */
582 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
583 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
606 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
608 /* MIPS DSP Multiply Sub-class insns */
609 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
610 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
633 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
635 /* DSP Bit/Manipulation Sub-class */
636 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
639 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
641 /* MIPS DSP Append Sub-class */
642 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
643 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
644 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
647 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
649 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
650 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
651 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
662 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
663 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
664 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
665 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
666 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
669 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
671 /* MIPS DSP Arithmetic Sub-class */
672 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
673 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
689 /* DSP Bit/Manipulation Sub-class */
690 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
691 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
698 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
700 /* MIPS DSP Multiply Sub-class insns */
701 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
702 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
704 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
706 /* MIPS DSP Arithmetic Sub-class */
707 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
708 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
718 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
730 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
732 /* DSP Compare-Pick Sub-class */
733 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
734 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
752 /* MIPS DSP Arithmetic Sub-class */
753 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
754 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
763 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
765 /* DSP Append Sub-class */
766 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
767 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
768 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
769 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
772 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
774 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
775 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
776 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
798 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
800 /* DSP Bit/Manipulation Sub-class */
801 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
804 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
806 /* MIPS DSP Multiply Sub-class insns */
807 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
808 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
835 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
837 /* MIPS DSP GPR-Based Shift Sub-class */
838 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
839 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
866 /* Coprocessor 0 (rs field) */
867 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
870 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
871 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
872 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
873 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
874 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
875 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
876 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
877 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
878 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
879 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
880 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
881 OPC_C0
= (0x10 << 21) | OPC_CP0
,
882 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
883 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
887 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
890 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
891 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
892 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
893 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
894 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
895 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
898 /* Coprocessor 0 (with rs == C0) */
899 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
902 OPC_TLBR
= 0x01 | OPC_C0
,
903 OPC_TLBWI
= 0x02 | OPC_C0
,
904 OPC_TLBINV
= 0x03 | OPC_C0
,
905 OPC_TLBINVF
= 0x04 | OPC_C0
,
906 OPC_TLBWR
= 0x06 | OPC_C0
,
907 OPC_TLBP
= 0x08 | OPC_C0
,
908 OPC_RFE
= 0x10 | OPC_C0
,
909 OPC_ERET
= 0x18 | OPC_C0
,
910 OPC_DERET
= 0x1F | OPC_C0
,
911 OPC_WAIT
= 0x20 | OPC_C0
,
914 /* Coprocessor 1 (rs field) */
915 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
917 /* Values for the fmt field in FP instructions */
919 /* 0 - 15 are reserved */
920 FMT_S
= 16, /* single fp */
921 FMT_D
= 17, /* double fp */
922 FMT_E
= 18, /* extended fp */
923 FMT_Q
= 19, /* quad fp */
924 FMT_W
= 20, /* 32-bit fixed */
925 FMT_L
= 21, /* 64-bit fixed */
926 FMT_PS
= 22, /* paired single fp */
927 /* 23 - 31 are reserved */
931 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
932 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
933 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
934 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
935 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
936 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
937 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
938 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
939 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
940 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
941 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
942 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
943 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
944 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
945 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
946 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
947 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
948 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
949 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
950 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
951 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
952 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
953 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
954 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
955 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
956 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
957 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
958 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
959 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
960 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
963 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
964 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
967 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
968 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
969 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
970 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
974 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
975 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
979 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
980 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
983 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
986 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
987 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
988 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
989 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
990 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
991 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
992 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
993 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
994 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
995 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
996 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
999 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1002 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1003 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1004 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1005 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1012 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1013 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1014 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1021 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1022 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1023 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1024 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1025 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1026 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1027 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1029 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1030 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1031 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1032 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1039 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1040 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1041 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1046 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1047 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1048 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1053 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1054 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1055 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1060 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1061 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1062 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1067 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1068 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1069 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1074 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1075 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1076 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1081 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1082 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1083 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1088 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1089 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1090 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1096 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1099 OPC_LWXC1
= 0x00 | OPC_CP3
,
1100 OPC_LDXC1
= 0x01 | OPC_CP3
,
1101 OPC_LUXC1
= 0x05 | OPC_CP3
,
1102 OPC_SWXC1
= 0x08 | OPC_CP3
,
1103 OPC_SDXC1
= 0x09 | OPC_CP3
,
1104 OPC_SUXC1
= 0x0D | OPC_CP3
,
1105 OPC_PREFX
= 0x0F | OPC_CP3
,
1106 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1107 OPC_MADD_S
= 0x20 | OPC_CP3
,
1108 OPC_MADD_D
= 0x21 | OPC_CP3
,
1109 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1110 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1111 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1112 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1113 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1114 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1115 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1116 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1117 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1118 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1122 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1124 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1125 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1126 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1127 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1128 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1129 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1130 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1131 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1132 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1133 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1134 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1135 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1136 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1137 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1138 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1139 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1140 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1141 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1142 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1143 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1144 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1146 /* MI10 instruction */
1147 OPC_LD_B
= (0x20) | OPC_MSA
,
1148 OPC_LD_H
= (0x21) | OPC_MSA
,
1149 OPC_LD_W
= (0x22) | OPC_MSA
,
1150 OPC_LD_D
= (0x23) | OPC_MSA
,
1151 OPC_ST_B
= (0x24) | OPC_MSA
,
1152 OPC_ST_H
= (0x25) | OPC_MSA
,
1153 OPC_ST_W
= (0x26) | OPC_MSA
,
1154 OPC_ST_D
= (0x27) | OPC_MSA
,
1158 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1159 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1160 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1161 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1162 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1163 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1164 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1165 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1166 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1167 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1168 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1169 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1170 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1172 /* I8 instruction */
1173 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1174 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1175 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1176 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1177 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1178 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1179 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1180 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1181 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1182 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1184 /* VEC/2R/2RF instruction */
1185 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1186 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1187 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1188 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1189 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1190 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1191 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1193 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1194 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1196 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1197 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1198 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1199 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1200 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1202 /* 2RF instruction df(bit 16) = _w, _d */
1203 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1204 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1205 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1206 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1207 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1208 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1209 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1210 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1211 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1212 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1213 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1214 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1215 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1216 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1217 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1218 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1220 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1221 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1222 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1223 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1224 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1225 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1226 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1227 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1228 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1229 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1230 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1231 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1232 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1233 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1234 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1235 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1236 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1237 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1238 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1239 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1240 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1241 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1242 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1243 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1244 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1245 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1246 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1247 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1248 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1249 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1250 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1251 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1252 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1253 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1254 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1255 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1256 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1257 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1258 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1259 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1260 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1261 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1262 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1263 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1264 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1265 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1266 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1267 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1268 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1269 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1270 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1271 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1272 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1273 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1274 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1275 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1276 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1277 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1278 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1279 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1280 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1281 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1282 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1283 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1285 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1286 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1287 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1288 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1289 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1290 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1291 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1292 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1293 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1296 /* 3RF instruction _df(bit 21) = _w, _d */
1297 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1298 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1299 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1300 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1301 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1302 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1303 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1304 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1305 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1306 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1307 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1308 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1309 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1310 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1311 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1312 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1313 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1316 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1317 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1318 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1319 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1320 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1321 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1322 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1323 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1324 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1327 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1328 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1329 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1332 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1339 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1340 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1341 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1342 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1343 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1344 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1345 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1346 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1347 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1348 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1349 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1350 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1351 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1354 /* global register indices */
1355 static TCGv_ptr cpu_env
;
1356 static TCGv cpu_gpr
[32], cpu_PC
;
1357 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1358 static TCGv cpu_dspctrl
, btarget
, bcond
;
1359 static TCGv_i32 hflags
;
1360 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1361 static TCGv_i64 fpu_f64
[32];
1362 static TCGv_i64 msa_wr_d
[64];
1364 #include "exec/gen-icount.h"
/* Helper-call wrappers.  Each variant materialises its trailing integer
 * argument as a TCGv_i32 constant, calls the named helper with cpu_env
 * plus the given TCG arguments, and frees the temporary.  The naming is
 * <returns>e<TCG args>i<immediates>.  do/while(0) keeps them
 * statement-safe inside if/else bodies. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1408 typedef struct DisasContext
{
1409 struct TranslationBlock
*tb
;
1410 target_ulong pc
, saved_pc
;
1412 int singlestep_enabled
;
1414 int32_t CP0_Config1
;
1415 /* Routine used to access memory */
1417 TCGMemOp default_tcg_memop_mask
;
1418 uint32_t hflags
, saved_hflags
;
1420 target_ulong btarget
;
1429 int CP0_LLAddr_shift
;
/* Translation-block exit states, stored in DisasContext. */
enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
/* ABI names of the 32 general-purpose registers, indexed by number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulators (HI0 plus the DSP ASE accumulators). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulators (LO0 plus the DSP ASE accumulators). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the MSA vector registers: each 128-bit w register is backed
 * by two 64-bit TCG globals (d0 = low half, d1 = high half). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Conditional disassembly logging, compiled out unless MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid-opcode diagnostic (opcode fields split out) when
 * MIPS_DEBUG_DISAS is enabled. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1499 /* General purpose registers moves. */
1500 static inline void gen_load_gpr (TCGv t
, int reg
)
1503 tcg_gen_movi_tl(t
, 0);
1505 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1508 static inline void gen_store_gpr (TCGv t
, int reg
)
1511 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1514 /* Moves to/from shadow registers. */
1515 static inline void gen_load_srsgpr (int from
, int to
)
1517 TCGv t0
= tcg_temp_new();
1520 tcg_gen_movi_tl(t0
, 0);
1522 TCGv_i32 t2
= tcg_temp_new_i32();
1523 TCGv_ptr addr
= tcg_temp_new_ptr();
1525 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1526 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1527 tcg_gen_andi_i32(t2
, t2
, 0xf);
1528 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1529 tcg_gen_ext_i32_ptr(addr
, t2
);
1530 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1532 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1533 tcg_temp_free_ptr(addr
);
1534 tcg_temp_free_i32(t2
);
1536 gen_store_gpr(t0
, to
);
1540 static inline void gen_store_srsgpr (int from
, int to
)
1543 TCGv t0
= tcg_temp_new();
1544 TCGv_i32 t2
= tcg_temp_new_i32();
1545 TCGv_ptr addr
= tcg_temp_new_ptr();
1547 gen_load_gpr(t0
, from
);
1548 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1549 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1550 tcg_gen_andi_i32(t2
, t2
, 0xf);
1551 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1552 tcg_gen_ext_i32_ptr(addr
, t2
);
1553 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1555 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1556 tcg_temp_free_ptr(addr
);
1557 tcg_temp_free_i32(t2
);
1563 static inline void gen_save_pc(target_ulong pc
)
1565 tcg_gen_movi_tl(cpu_PC
, pc
);
1568 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1570 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1571 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1572 gen_save_pc(ctx
->pc
);
1573 ctx
->saved_pc
= ctx
->pc
;
1575 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1576 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1577 ctx
->saved_hflags
= ctx
->hflags
;
1578 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1584 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1590 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1592 ctx
->saved_hflags
= ctx
->hflags
;
1593 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1599 ctx
->btarget
= env
->btarget
;
1604 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1606 TCGv_i32 texcp
= tcg_const_i32(excp
);
1607 TCGv_i32 terr
= tcg_const_i32(err
);
1608 save_cpu_state(ctx
, 1);
1609 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1610 tcg_temp_free_i32(terr
);
1611 tcg_temp_free_i32(texcp
);
1612 ctx
->bstate
= BS_EXCP
;
1615 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1617 gen_helper_0e0i(raise_exception
, excp
);
1620 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1622 generate_exception_err(ctx
, excp
, 0);
1625 /* Floating point register moves. */
1626 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1628 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1629 generate_exception(ctx
, EXCP_RI
);
1631 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1634 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1637 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1638 generate_exception(ctx
, EXCP_RI
);
1640 t64
= tcg_temp_new_i64();
1641 tcg_gen_extu_i32_i64(t64
, t
);
1642 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1643 tcg_temp_free_i64(t64
);
1646 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1648 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1649 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1651 gen_load_fpr32(ctx
, t
, reg
| 1);
1655 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1657 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1658 TCGv_i64 t64
= tcg_temp_new_i64();
1659 tcg_gen_extu_i32_i64(t64
, t
);
1660 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1661 tcg_temp_free_i64(t64
);
1663 gen_store_fpr32(ctx
, t
, reg
| 1);
1667 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1669 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1670 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1672 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1676 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1678 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1679 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1682 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1683 t0
= tcg_temp_new_i64();
1684 tcg_gen_shri_i64(t0
, t
, 32);
1685 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1686 tcg_temp_free_i64(t0
);
/* Map FP condition code 'cc' to its bit position in FCSR: cc0 is bit 23,
 * cc1..cc7 occupy bits 25..31 (bit 24 is FS, so cc>=1 maps to 24+cc).
 * NOTE(review): body reconstructed from the MIPS FCSR layout — the
 * original body was lost in the damaged source; confirm against upstream. */
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
1698 /* Addresses computation */
1699 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1701 tcg_gen_add_tl(ret
, arg0
, arg1
);
1703 #if defined(TARGET_MIPS64)
1704 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1705 tcg_gen_ext32s_i64(ret
, ret
);
1710 /* Addresses computation (translation time) */
1711 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1714 target_long sum
= base
+ offset
;
1716 #if defined(TARGET_MIPS64)
1717 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1724 /* Sign-extract the low 32-bits to a target_long. */
1725 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1727 #if defined(TARGET_MIPS64)
1728 tcg_gen_ext32s_i64(ret
, arg
);
1730 tcg_gen_extrl_i64_i32(ret
, arg
);
1734 /* Sign-extract the high 32-bits to a target_long. */
1735 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1737 #if defined(TARGET_MIPS64)
1738 tcg_gen_sari_i64(ret
, arg
, 32);
1740 tcg_gen_extrh_i64_i32(ret
, arg
);
1744 static inline void check_cp0_enabled(DisasContext
*ctx
)
1746 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1747 generate_exception_err(ctx
, EXCP_CpU
, 0);
1750 static inline void check_cp1_enabled(DisasContext
*ctx
)
1752 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1753 generate_exception_err(ctx
, EXCP_CpU
, 1);
1756 /* Verify that the processor is running with COP1X instructions enabled.
1757 This is associated with the nabla symbol in the MIPS32 and MIPS64
1760 static inline void check_cop1x(DisasContext
*ctx
)
1762 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1763 generate_exception_end(ctx
, EXCP_RI
);
1766 /* Verify that the processor is running with 64-bit floating-point
1767 operations enabled. */
1769 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1771 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1772 generate_exception_end(ctx
, EXCP_RI
);
1776 * Verify if floating point register is valid; an operation is not defined
1777 * if bit 0 of any register specification is set and the FR bit in the
1778 * Status register equals zero, since the register numbers specify an
1779 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1780 * in the Status register equals one, both even and odd register numbers
1781 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1783 * Multiple 64 bit wide registers can be checked by calling
1784 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1786 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1788 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1789 generate_exception_end(ctx
, EXCP_RI
);
1792 /* Verify that the processor is running with DSP instructions enabled.
1793 This is enabled by CP0 Status register MX(24) bit.
1796 static inline void check_dsp(DisasContext
*ctx
)
1798 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1799 if (ctx
->insn_flags
& ASE_DSP
) {
1800 generate_exception_end(ctx
, EXCP_DSPDIS
);
1802 generate_exception_end(ctx
, EXCP_RI
);
1807 static inline void check_dspr2(DisasContext
*ctx
)
1809 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1810 if (ctx
->insn_flags
& ASE_DSP
) {
1811 generate_exception_end(ctx
, EXCP_DSPDIS
);
1813 generate_exception_end(ctx
, EXCP_RI
);
1818 /* This code generates a "reserved instruction" exception if the
1819 CPU does not support the instruction set corresponding to flags. */
1820 static inline void check_insn(DisasContext
*ctx
, int flags
)
1822 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1823 generate_exception_end(ctx
, EXCP_RI
);
1827 /* This code generates a "reserved instruction" exception if the
1828 CPU has corresponding flag set which indicates that the instruction
1829 has been removed. */
1830 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1832 if (unlikely(ctx
->insn_flags
& flags
)) {
1833 generate_exception_end(ctx
, EXCP_RI
);
1837 /* This code generates a "reserved instruction" exception if the
1838 CPU does not support 64-bit paired-single (PS) floating point data type */
1839 static inline void check_ps(DisasContext
*ctx
)
1841 if (unlikely(!ctx
->ps
)) {
1842 generate_exception(ctx
, EXCP_RI
);
1844 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
1857 #ifndef CONFIG_USER_ONLY
1858 static inline void check_mvh(DisasContext
*ctx
)
1860 if (unlikely(!ctx
->mvh
)) {
1861 generate_exception(ctx
, EXCP_RI
);
1866 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1867 calling interface for 32 and 64-bit FPRs. No sense in changing
1868 all callers for gen_load_fpr32 when we need the CTX parameter for
1870 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1871 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1872 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1873 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1874 int ft, int fs, int cc) \
1876 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1877 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1886 check_cp1_registers(ctx, fs | ft); \
1894 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1895 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1897 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1898 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1899 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1900 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1901 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1902 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1903 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1904 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1905 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1906 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1907 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1908 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1909 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1910 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1911 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1912 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1915 tcg_temp_free_i##bits (fp0); \
1916 tcg_temp_free_i##bits (fp1); \
1919 FOP_CONDS(, 0, d
, FMT_D
, 64)
1920 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1921 FOP_CONDS(, 0, s
, FMT_S
, 32)
1922 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1923 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1924 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1927 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1928 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1929 int ft, int fs, int fd) \
1931 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1932 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1933 if (ifmt == FMT_D) { \
1934 check_cp1_registers(ctx, fs | ft | fd); \
1936 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1937 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1940 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1943 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1946 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1949 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1952 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1955 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1958 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1961 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1964 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1967 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1970 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1973 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1976 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1979 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1982 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1985 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1988 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1991 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1994 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1997 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2000 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2003 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2009 tcg_temp_free_i ## bits (fp0); \
2010 tcg_temp_free_i ## bits (fp1); \
2013 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2014 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2016 #undef gen_ldcmp_fpr32
2017 #undef gen_ldcmp_fpr64
2019 /* load/store instructions. */
2020 #ifdef CONFIG_USER_ONLY
2021 #define OP_LD_ATOMIC(insn,fname) \
2022 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2024 TCGv t0 = tcg_temp_new(); \
2025 tcg_gen_mov_tl(t0, arg1); \
2026 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2027 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2028 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2029 tcg_temp_free(t0); \
2032 #define OP_LD_ATOMIC(insn,fname) \
2033 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2035 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2038 OP_LD_ATOMIC(ll
,ld32s
);
2039 #if defined(TARGET_MIPS64)
2040 OP_LD_ATOMIC(lld
,ld64
);
2044 #ifdef CONFIG_USER_ONLY
2045 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2046 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2048 TCGv t0 = tcg_temp_new(); \
2049 TCGLabel *l1 = gen_new_label(); \
2050 TCGLabel *l2 = gen_new_label(); \
2052 tcg_gen_andi_tl(t0, arg2, almask); \
2053 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2054 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2055 generate_exception(ctx, EXCP_AdES); \
2056 gen_set_label(l1); \
2057 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2058 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2059 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2060 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2061 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2062 generate_exception_end(ctx, EXCP_SC); \
2063 gen_set_label(l2); \
2064 tcg_gen_movi_tl(t0, 0); \
2065 gen_store_gpr(t0, rt); \
2066 tcg_temp_free(t0); \
2069 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2070 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2072 TCGv t0 = tcg_temp_new(); \
2073 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2074 gen_store_gpr(t0, rt); \
2075 tcg_temp_free(t0); \
2078 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2079 #if defined(TARGET_MIPS64)
2080 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2084 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2085 int base
, int16_t offset
)
2088 tcg_gen_movi_tl(addr
, offset
);
2089 } else if (offset
== 0) {
2090 gen_load_gpr(addr
, base
);
2092 tcg_gen_movi_tl(addr
, offset
);
2093 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2097 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2099 target_ulong pc
= ctx
->pc
;
2101 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2102 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2107 pc
&= ~(target_ulong
)3;
2112 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2113 int rt
, int base
, int16_t offset
)
2117 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2118 /* Loongson CPU uses a load to zero register for prefetch.
2119 We emulate it as a NOP. On other CPU we must perform the
2120 actual memory access. */
2124 t0
= tcg_temp_new();
2125 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2128 #if defined(TARGET_MIPS64)
2130 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2131 ctx
->default_tcg_memop_mask
);
2132 gen_store_gpr(t0
, rt
);
2135 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2136 ctx
->default_tcg_memop_mask
);
2137 gen_store_gpr(t0
, rt
);
2141 op_ld_lld(t0
, t0
, ctx
);
2142 gen_store_gpr(t0
, rt
);
2145 t1
= tcg_temp_new();
2146 /* Do a byte access to possibly trigger a page
2147 fault with the unaligned address. */
2148 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2149 tcg_gen_andi_tl(t1
, t0
, 7);
2150 #ifndef TARGET_WORDS_BIGENDIAN
2151 tcg_gen_xori_tl(t1
, t1
, 7);
2153 tcg_gen_shli_tl(t1
, t1
, 3);
2154 tcg_gen_andi_tl(t0
, t0
, ~7);
2155 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2156 tcg_gen_shl_tl(t0
, t0
, t1
);
2157 t2
= tcg_const_tl(-1);
2158 tcg_gen_shl_tl(t2
, t2
, t1
);
2159 gen_load_gpr(t1
, rt
);
2160 tcg_gen_andc_tl(t1
, t1
, t2
);
2162 tcg_gen_or_tl(t0
, t0
, t1
);
2164 gen_store_gpr(t0
, rt
);
2167 t1
= tcg_temp_new();
2168 /* Do a byte access to possibly trigger a page
2169 fault with the unaligned address. */
2170 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2171 tcg_gen_andi_tl(t1
, t0
, 7);
2172 #ifdef TARGET_WORDS_BIGENDIAN
2173 tcg_gen_xori_tl(t1
, t1
, 7);
2175 tcg_gen_shli_tl(t1
, t1
, 3);
2176 tcg_gen_andi_tl(t0
, t0
, ~7);
2177 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2178 tcg_gen_shr_tl(t0
, t0
, t1
);
2179 tcg_gen_xori_tl(t1
, t1
, 63);
2180 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2181 tcg_gen_shl_tl(t2
, t2
, t1
);
2182 gen_load_gpr(t1
, rt
);
2183 tcg_gen_and_tl(t1
, t1
, t2
);
2185 tcg_gen_or_tl(t0
, t0
, t1
);
2187 gen_store_gpr(t0
, rt
);
2190 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2191 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2193 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2194 gen_store_gpr(t0
, rt
);
2198 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2199 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2201 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2202 gen_store_gpr(t0
, rt
);
2205 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2206 ctx
->default_tcg_memop_mask
);
2207 gen_store_gpr(t0
, rt
);
2210 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2211 ctx
->default_tcg_memop_mask
);
2212 gen_store_gpr(t0
, rt
);
2215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2216 ctx
->default_tcg_memop_mask
);
2217 gen_store_gpr(t0
, rt
);
2220 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2221 gen_store_gpr(t0
, rt
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2225 gen_store_gpr(t0
, rt
);
2228 t1
= tcg_temp_new();
2229 /* Do a byte access to possibly trigger a page
2230 fault with the unaligned address. */
2231 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2232 tcg_gen_andi_tl(t1
, t0
, 3);
2233 #ifndef TARGET_WORDS_BIGENDIAN
2234 tcg_gen_xori_tl(t1
, t1
, 3);
2236 tcg_gen_shli_tl(t1
, t1
, 3);
2237 tcg_gen_andi_tl(t0
, t0
, ~3);
2238 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2239 tcg_gen_shl_tl(t0
, t0
, t1
);
2240 t2
= tcg_const_tl(-1);
2241 tcg_gen_shl_tl(t2
, t2
, t1
);
2242 gen_load_gpr(t1
, rt
);
2243 tcg_gen_andc_tl(t1
, t1
, t2
);
2245 tcg_gen_or_tl(t0
, t0
, t1
);
2247 tcg_gen_ext32s_tl(t0
, t0
);
2248 gen_store_gpr(t0
, rt
);
2251 t1
= tcg_temp_new();
2252 /* Do a byte access to possibly trigger a page
2253 fault with the unaligned address. */
2254 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2255 tcg_gen_andi_tl(t1
, t0
, 3);
2256 #ifdef TARGET_WORDS_BIGENDIAN
2257 tcg_gen_xori_tl(t1
, t1
, 3);
2259 tcg_gen_shli_tl(t1
, t1
, 3);
2260 tcg_gen_andi_tl(t0
, t0
, ~3);
2261 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2262 tcg_gen_shr_tl(t0
, t0
, t1
);
2263 tcg_gen_xori_tl(t1
, t1
, 31);
2264 t2
= tcg_const_tl(0xfffffffeull
);
2265 tcg_gen_shl_tl(t2
, t2
, t1
);
2266 gen_load_gpr(t1
, rt
);
2267 tcg_gen_and_tl(t1
, t1
, t2
);
2269 tcg_gen_or_tl(t0
, t0
, t1
);
2271 tcg_gen_ext32s_tl(t0
, t0
);
2272 gen_store_gpr(t0
, rt
);
2276 op_ld_ll(t0
, t0
, ctx
);
2277 gen_store_gpr(t0
, rt
);
2284 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2285 int base
, int16_t offset
)
2287 TCGv t0
= tcg_temp_new();
2288 TCGv t1
= tcg_temp_new();
2290 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2291 gen_load_gpr(t1
, rt
);
2293 #if defined(TARGET_MIPS64)
2295 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2296 ctx
->default_tcg_memop_mask
);
2299 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2302 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2306 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2307 ctx
->default_tcg_memop_mask
);
2310 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2311 ctx
->default_tcg_memop_mask
);
2314 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2317 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2320 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2328 /* Store conditional */
2329 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2330 int base
, int16_t offset
)
2334 #ifdef CONFIG_USER_ONLY
2335 t0
= tcg_temp_local_new();
2336 t1
= tcg_temp_local_new();
2338 t0
= tcg_temp_new();
2339 t1
= tcg_temp_new();
2341 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2342 gen_load_gpr(t1
, rt
);
2344 #if defined(TARGET_MIPS64)
2347 op_st_scd(t1
, t0
, rt
, ctx
);
2352 op_st_sc(t1
, t0
, rt
, ctx
);
2359 /* Load and store */
2360 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2361 int base
, int16_t offset
)
2363 TCGv t0
= tcg_temp_new();
2365 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2366 /* Don't do NOP if destination is zero: we must perform the actual
2371 TCGv_i32 fp0
= tcg_temp_new_i32();
2372 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2373 ctx
->default_tcg_memop_mask
);
2374 gen_store_fpr32(ctx
, fp0
, ft
);
2375 tcg_temp_free_i32(fp0
);
2380 TCGv_i32 fp0
= tcg_temp_new_i32();
2381 gen_load_fpr32(ctx
, fp0
, ft
);
2382 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2383 ctx
->default_tcg_memop_mask
);
2384 tcg_temp_free_i32(fp0
);
2389 TCGv_i64 fp0
= tcg_temp_new_i64();
2390 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2391 ctx
->default_tcg_memop_mask
);
2392 gen_store_fpr64(ctx
, fp0
, ft
);
2393 tcg_temp_free_i64(fp0
);
2398 TCGv_i64 fp0
= tcg_temp_new_i64();
2399 gen_load_fpr64(ctx
, fp0
, ft
);
2400 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2401 ctx
->default_tcg_memop_mask
);
2402 tcg_temp_free_i64(fp0
);
2406 MIPS_INVAL("flt_ldst");
2407 generate_exception_end(ctx
, EXCP_RI
);
2414 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2415 int rs
, int16_t imm
)
2417 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2418 check_cp1_enabled(ctx
);
2422 check_insn(ctx
, ISA_MIPS2
);
2425 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2428 generate_exception_err(ctx
, EXCP_CpU
, 1);
2432 /* Arithmetic with immediate operand */
2433 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2434 int rt
, int rs
, int16_t imm
)
2436 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2438 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2439 /* If no destination, treat it as a NOP.
2440 For addi, we must generate the overflow exception when needed. */
2446 TCGv t0
= tcg_temp_local_new();
2447 TCGv t1
= tcg_temp_new();
2448 TCGv t2
= tcg_temp_new();
2449 TCGLabel
*l1
= gen_new_label();
2451 gen_load_gpr(t1
, rs
);
2452 tcg_gen_addi_tl(t0
, t1
, uimm
);
2453 tcg_gen_ext32s_tl(t0
, t0
);
2455 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2456 tcg_gen_xori_tl(t2
, t0
, uimm
);
2457 tcg_gen_and_tl(t1
, t1
, t2
);
2459 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2461 /* operands of same sign, result different sign */
2462 generate_exception(ctx
, EXCP_OVERFLOW
);
2464 tcg_gen_ext32s_tl(t0
, t0
);
2465 gen_store_gpr(t0
, rt
);
2471 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2472 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2474 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2477 #if defined(TARGET_MIPS64)
2480 TCGv t0
= tcg_temp_local_new();
2481 TCGv t1
= tcg_temp_new();
2482 TCGv t2
= tcg_temp_new();
2483 TCGLabel
*l1
= gen_new_label();
2485 gen_load_gpr(t1
, rs
);
2486 tcg_gen_addi_tl(t0
, t1
, uimm
);
2488 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2489 tcg_gen_xori_tl(t2
, t0
, uimm
);
2490 tcg_gen_and_tl(t1
, t1
, t2
);
2492 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2494 /* operands of same sign, result different sign */
2495 generate_exception(ctx
, EXCP_OVERFLOW
);
2497 gen_store_gpr(t0
, rt
);
2503 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2505 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2512 /* Logic with immediate operand */
2513 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2514 int rt
, int rs
, int16_t imm
)
2519 /* If no destination, treat it as a NOP. */
2522 uimm
= (uint16_t)imm
;
2525 if (likely(rs
!= 0))
2526 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2528 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2532 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2534 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2537 if (likely(rs
!= 0))
2538 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2540 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2543 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2545 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2546 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2548 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2557 /* Set on less than with immediate operand */
2558 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2559 int rt
, int rs
, int16_t imm
)
2561 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2565 /* If no destination, treat it as a NOP. */
2568 t0
= tcg_temp_new();
2569 gen_load_gpr(t0
, rs
);
2572 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2575 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2581 /* Shifts with immediate operand */
2582 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2583 int rt
, int rs
, int16_t imm
)
2585 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2589 /* If no destination, treat it as a NOP. */
2593 t0
= tcg_temp_new();
2594 gen_load_gpr(t0
, rs
);
2597 tcg_gen_shli_tl(t0
, t0
, uimm
);
2598 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2601 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2605 tcg_gen_ext32u_tl(t0
, t0
);
2606 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2608 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2613 TCGv_i32 t1
= tcg_temp_new_i32();
2615 tcg_gen_trunc_tl_i32(t1
, t0
);
2616 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2617 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2618 tcg_temp_free_i32(t1
);
2620 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2623 #if defined(TARGET_MIPS64)
2625 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2628 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2631 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2637 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2641 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2644 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2647 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2650 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2658 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2659 int rd
, int rs
, int rt
)
2661 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2662 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2663 /* If no destination, treat it as a NOP.
2664 For add & sub, we must generate the overflow exception when needed. */
2671 TCGv t0
= tcg_temp_local_new();
2672 TCGv t1
= tcg_temp_new();
2673 TCGv t2
= tcg_temp_new();
2674 TCGLabel
*l1
= gen_new_label();
2676 gen_load_gpr(t1
, rs
);
2677 gen_load_gpr(t2
, rt
);
2678 tcg_gen_add_tl(t0
, t1
, t2
);
2679 tcg_gen_ext32s_tl(t0
, t0
);
2680 tcg_gen_xor_tl(t1
, t1
, t2
);
2681 tcg_gen_xor_tl(t2
, t0
, t2
);
2682 tcg_gen_andc_tl(t1
, t2
, t1
);
2684 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2686 /* operands of same sign, result different sign */
2687 generate_exception(ctx
, EXCP_OVERFLOW
);
2689 gen_store_gpr(t0
, rd
);
2694 if (rs
!= 0 && rt
!= 0) {
2695 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2696 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2697 } else if (rs
== 0 && rt
!= 0) {
2698 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2699 } else if (rs
!= 0 && rt
== 0) {
2700 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2702 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2707 TCGv t0
= tcg_temp_local_new();
2708 TCGv t1
= tcg_temp_new();
2709 TCGv t2
= tcg_temp_new();
2710 TCGLabel
*l1
= gen_new_label();
2712 gen_load_gpr(t1
, rs
);
2713 gen_load_gpr(t2
, rt
);
2714 tcg_gen_sub_tl(t0
, t1
, t2
);
2715 tcg_gen_ext32s_tl(t0
, t0
);
2716 tcg_gen_xor_tl(t2
, t1
, t2
);
2717 tcg_gen_xor_tl(t1
, t0
, t1
);
2718 tcg_gen_and_tl(t1
, t1
, t2
);
2720 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2722 /* operands of different sign, first operand and result different sign */
2723 generate_exception(ctx
, EXCP_OVERFLOW
);
2725 gen_store_gpr(t0
, rd
);
2730 if (rs
!= 0 && rt
!= 0) {
2731 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2732 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2733 } else if (rs
== 0 && rt
!= 0) {
2734 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2735 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2736 } else if (rs
!= 0 && rt
== 0) {
2737 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2739 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2742 #if defined(TARGET_MIPS64)
2745 TCGv t0
= tcg_temp_local_new();
2746 TCGv t1
= tcg_temp_new();
2747 TCGv t2
= tcg_temp_new();
2748 TCGLabel
*l1
= gen_new_label();
2750 gen_load_gpr(t1
, rs
);
2751 gen_load_gpr(t2
, rt
);
2752 tcg_gen_add_tl(t0
, t1
, t2
);
2753 tcg_gen_xor_tl(t1
, t1
, t2
);
2754 tcg_gen_xor_tl(t2
, t0
, t2
);
2755 tcg_gen_andc_tl(t1
, t2
, t1
);
2757 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2759 /* operands of same sign, result different sign */
2760 generate_exception(ctx
, EXCP_OVERFLOW
);
2762 gen_store_gpr(t0
, rd
);
2767 if (rs
!= 0 && rt
!= 0) {
2768 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2769 } else if (rs
== 0 && rt
!= 0) {
2770 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2771 } else if (rs
!= 0 && rt
== 0) {
2772 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2774 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2779 TCGv t0
= tcg_temp_local_new();
2780 TCGv t1
= tcg_temp_new();
2781 TCGv t2
= tcg_temp_new();
2782 TCGLabel
*l1
= gen_new_label();
2784 gen_load_gpr(t1
, rs
);
2785 gen_load_gpr(t2
, rt
);
2786 tcg_gen_sub_tl(t0
, t1
, t2
);
2787 tcg_gen_xor_tl(t2
, t1
, t2
);
2788 tcg_gen_xor_tl(t1
, t0
, t1
);
2789 tcg_gen_and_tl(t1
, t1
, t2
);
2791 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2793 /* operands of different sign, first operand and result different sign */
2794 generate_exception(ctx
, EXCP_OVERFLOW
);
2796 gen_store_gpr(t0
, rd
);
2801 if (rs
!= 0 && rt
!= 0) {
2802 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2803 } else if (rs
== 0 && rt
!= 0) {
2804 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2805 } else if (rs
!= 0 && rt
== 0) {
2806 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2808 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2813 if (likely(rs
!= 0 && rt
!= 0)) {
2814 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2815 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2817 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2823 /* Conditional move */
2824 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2825 int rd
, int rs
, int rt
)
2830 /* If no destination, treat it as a NOP. */
2834 t0
= tcg_temp_new();
2835 gen_load_gpr(t0
, rt
);
2836 t1
= tcg_const_tl(0);
2837 t2
= tcg_temp_new();
2838 gen_load_gpr(t2
, rs
);
2841 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2844 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2847 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2850 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2859 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2860 int rd
, int rs
, int rt
)
2863 /* If no destination, treat it as a NOP. */
2869 if (likely(rs
!= 0 && rt
!= 0)) {
2870 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2872 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2876 if (rs
!= 0 && rt
!= 0) {
2877 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2878 } else if (rs
== 0 && rt
!= 0) {
2879 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2880 } else if (rs
!= 0 && rt
== 0) {
2881 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2883 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2887 if (likely(rs
!= 0 && rt
!= 0)) {
2888 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2889 } else if (rs
== 0 && rt
!= 0) {
2890 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2891 } else if (rs
!= 0 && rt
== 0) {
2892 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2894 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2898 if (likely(rs
!= 0 && rt
!= 0)) {
2899 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2900 } else if (rs
== 0 && rt
!= 0) {
2901 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2902 } else if (rs
!= 0 && rt
== 0) {
2903 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2905 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2911 /* Set on lower than */
2912 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2913 int rd
, int rs
, int rt
)
2918 /* If no destination, treat it as a NOP. */
2922 t0
= tcg_temp_new();
2923 t1
= tcg_temp_new();
2924 gen_load_gpr(t0
, rs
);
2925 gen_load_gpr(t1
, rt
);
2928 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2931 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2939 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2940 int rd
, int rs
, int rt
)
2945 /* If no destination, treat it as a NOP.
2946 For add & sub, we must generate the overflow exception when needed. */
2950 t0
= tcg_temp_new();
2951 t1
= tcg_temp_new();
2952 gen_load_gpr(t0
, rs
);
2953 gen_load_gpr(t1
, rt
);
2956 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2957 tcg_gen_shl_tl(t0
, t1
, t0
);
2958 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2961 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2962 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2965 tcg_gen_ext32u_tl(t1
, t1
);
2966 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2967 tcg_gen_shr_tl(t0
, t1
, t0
);
2968 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2972 TCGv_i32 t2
= tcg_temp_new_i32();
2973 TCGv_i32 t3
= tcg_temp_new_i32();
2975 tcg_gen_trunc_tl_i32(t2
, t0
);
2976 tcg_gen_trunc_tl_i32(t3
, t1
);
2977 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2978 tcg_gen_rotr_i32(t2
, t3
, t2
);
2979 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2980 tcg_temp_free_i32(t2
);
2981 tcg_temp_free_i32(t3
);
2984 #if defined(TARGET_MIPS64)
2986 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2987 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2990 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2991 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2994 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2995 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2998 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2999 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3007 /* Arithmetic on HI/LO registers */
3008 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3010 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3021 #if defined(TARGET_MIPS64)
3023 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3027 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3031 #if defined(TARGET_MIPS64)
3033 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3037 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3042 #if defined(TARGET_MIPS64)
3044 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3048 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3051 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3056 #if defined(TARGET_MIPS64)
3058 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3062 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3065 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3071 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3074 TCGv t0
= tcg_const_tl(addr
);
3075 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3076 gen_store_gpr(t0
, reg
);
3080 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3086 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3089 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3090 addr
= addr_add(ctx
, pc
, offset
);
3091 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3095 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3096 addr
= addr_add(ctx
, pc
, offset
);
3097 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3099 #if defined(TARGET_MIPS64)
3102 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3103 addr
= addr_add(ctx
, pc
, offset
);
3104 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3108 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3111 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3112 addr
= addr_add(ctx
, pc
, offset
);
3113 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3118 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3119 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3120 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3123 #if defined(TARGET_MIPS64)
3124 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3125 case R6_OPC_LDPC
+ (1 << 16):
3126 case R6_OPC_LDPC
+ (2 << 16):
3127 case R6_OPC_LDPC
+ (3 << 16):
3129 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3130 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3131 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3135 MIPS_INVAL("OPC_PCREL");
3136 generate_exception_end(ctx
, EXCP_RI
);
3143 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3152 t0
= tcg_temp_new();
3153 t1
= tcg_temp_new();
3155 gen_load_gpr(t0
, rs
);
3156 gen_load_gpr(t1
, rt
);
3161 TCGv t2
= tcg_temp_new();
3162 TCGv t3
= tcg_temp_new();
3163 tcg_gen_ext32s_tl(t0
, t0
);
3164 tcg_gen_ext32s_tl(t1
, t1
);
3165 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3166 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3167 tcg_gen_and_tl(t2
, t2
, t3
);
3168 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3169 tcg_gen_or_tl(t2
, t2
, t3
);
3170 tcg_gen_movi_tl(t3
, 0);
3171 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3172 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3173 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3180 TCGv t2
= tcg_temp_new();
3181 TCGv t3
= tcg_temp_new();
3182 tcg_gen_ext32s_tl(t0
, t0
);
3183 tcg_gen_ext32s_tl(t1
, t1
);
3184 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3185 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3186 tcg_gen_and_tl(t2
, t2
, t3
);
3187 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3188 tcg_gen_or_tl(t2
, t2
, t3
);
3189 tcg_gen_movi_tl(t3
, 0);
3190 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3191 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3192 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3199 TCGv t2
= tcg_const_tl(0);
3200 TCGv t3
= tcg_const_tl(1);
3201 tcg_gen_ext32u_tl(t0
, t0
);
3202 tcg_gen_ext32u_tl(t1
, t1
);
3203 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3204 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3205 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3212 TCGv t2
= tcg_const_tl(0);
3213 TCGv t3
= tcg_const_tl(1);
3214 tcg_gen_ext32u_tl(t0
, t0
);
3215 tcg_gen_ext32u_tl(t1
, t1
);
3216 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3217 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3218 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3225 TCGv_i32 t2
= tcg_temp_new_i32();
3226 TCGv_i32 t3
= tcg_temp_new_i32();
3227 tcg_gen_trunc_tl_i32(t2
, t0
);
3228 tcg_gen_trunc_tl_i32(t3
, t1
);
3229 tcg_gen_mul_i32(t2
, t2
, t3
);
3230 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3231 tcg_temp_free_i32(t2
);
3232 tcg_temp_free_i32(t3
);
3237 TCGv_i32 t2
= tcg_temp_new_i32();
3238 TCGv_i32 t3
= tcg_temp_new_i32();
3239 tcg_gen_trunc_tl_i32(t2
, t0
);
3240 tcg_gen_trunc_tl_i32(t3
, t1
);
3241 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3242 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3243 tcg_temp_free_i32(t2
);
3244 tcg_temp_free_i32(t3
);
3249 TCGv_i32 t2
= tcg_temp_new_i32();
3250 TCGv_i32 t3
= tcg_temp_new_i32();
3251 tcg_gen_trunc_tl_i32(t2
, t0
);
3252 tcg_gen_trunc_tl_i32(t3
, t1
);
3253 tcg_gen_mul_i32(t2
, t2
, t3
);
3254 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3255 tcg_temp_free_i32(t2
);
3256 tcg_temp_free_i32(t3
);
3261 TCGv_i32 t2
= tcg_temp_new_i32();
3262 TCGv_i32 t3
= tcg_temp_new_i32();
3263 tcg_gen_trunc_tl_i32(t2
, t0
);
3264 tcg_gen_trunc_tl_i32(t3
, t1
);
3265 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3266 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3267 tcg_temp_free_i32(t2
);
3268 tcg_temp_free_i32(t3
);
3271 #if defined(TARGET_MIPS64)
3274 TCGv t2
= tcg_temp_new();
3275 TCGv t3
= tcg_temp_new();
3276 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3277 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3278 tcg_gen_and_tl(t2
, t2
, t3
);
3279 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3280 tcg_gen_or_tl(t2
, t2
, t3
);
3281 tcg_gen_movi_tl(t3
, 0);
3282 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3283 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3290 TCGv t2
= tcg_temp_new();
3291 TCGv t3
= tcg_temp_new();
3292 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3293 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3294 tcg_gen_and_tl(t2
, t2
, t3
);
3295 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3296 tcg_gen_or_tl(t2
, t2
, t3
);
3297 tcg_gen_movi_tl(t3
, 0);
3298 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3299 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3306 TCGv t2
= tcg_const_tl(0);
3307 TCGv t3
= tcg_const_tl(1);
3308 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3309 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3316 TCGv t2
= tcg_const_tl(0);
3317 TCGv t3
= tcg_const_tl(1);
3318 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3319 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3325 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3329 TCGv t2
= tcg_temp_new();
3330 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3335 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3339 TCGv t2
= tcg_temp_new();
3340 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3346 MIPS_INVAL("r6 mul/div");
3347 generate_exception_end(ctx
, EXCP_RI
);
3355 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3356 int acc
, int rs
, int rt
)
3360 t0
= tcg_temp_new();
3361 t1
= tcg_temp_new();
3363 gen_load_gpr(t0
, rs
);
3364 gen_load_gpr(t1
, rt
);
3373 TCGv t2
= tcg_temp_new();
3374 TCGv t3
= tcg_temp_new();
3375 tcg_gen_ext32s_tl(t0
, t0
);
3376 tcg_gen_ext32s_tl(t1
, t1
);
3377 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3378 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3379 tcg_gen_and_tl(t2
, t2
, t3
);
3380 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3381 tcg_gen_or_tl(t2
, t2
, t3
);
3382 tcg_gen_movi_tl(t3
, 0);
3383 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3384 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3385 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3386 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3387 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3394 TCGv t2
= tcg_const_tl(0);
3395 TCGv t3
= tcg_const_tl(1);
3396 tcg_gen_ext32u_tl(t0
, t0
);
3397 tcg_gen_ext32u_tl(t1
, t1
);
3398 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3399 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3400 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3401 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3402 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3409 TCGv_i32 t2
= tcg_temp_new_i32();
3410 TCGv_i32 t3
= tcg_temp_new_i32();
3411 tcg_gen_trunc_tl_i32(t2
, t0
);
3412 tcg_gen_trunc_tl_i32(t3
, t1
);
3413 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3414 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3415 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3416 tcg_temp_free_i32(t2
);
3417 tcg_temp_free_i32(t3
);
3422 TCGv_i32 t2
= tcg_temp_new_i32();
3423 TCGv_i32 t3
= tcg_temp_new_i32();
3424 tcg_gen_trunc_tl_i32(t2
, t0
);
3425 tcg_gen_trunc_tl_i32(t3
, t1
);
3426 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3427 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3428 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3429 tcg_temp_free_i32(t2
);
3430 tcg_temp_free_i32(t3
);
3433 #if defined(TARGET_MIPS64)
3436 TCGv t2
= tcg_temp_new();
3437 TCGv t3
= tcg_temp_new();
3438 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3439 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3440 tcg_gen_and_tl(t2
, t2
, t3
);
3441 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3442 tcg_gen_or_tl(t2
, t2
, t3
);
3443 tcg_gen_movi_tl(t3
, 0);
3444 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3445 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3446 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3453 TCGv t2
= tcg_const_tl(0);
3454 TCGv t3
= tcg_const_tl(1);
3455 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3456 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3457 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3463 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3466 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3471 TCGv_i64 t2
= tcg_temp_new_i64();
3472 TCGv_i64 t3
= tcg_temp_new_i64();
3474 tcg_gen_ext_tl_i64(t2
, t0
);
3475 tcg_gen_ext_tl_i64(t3
, t1
);
3476 tcg_gen_mul_i64(t2
, t2
, t3
);
3477 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3478 tcg_gen_add_i64(t2
, t2
, t3
);
3479 tcg_temp_free_i64(t3
);
3480 gen_move_low32(cpu_LO
[acc
], t2
);
3481 gen_move_high32(cpu_HI
[acc
], t2
);
3482 tcg_temp_free_i64(t2
);
3487 TCGv_i64 t2
= tcg_temp_new_i64();
3488 TCGv_i64 t3
= tcg_temp_new_i64();
3490 tcg_gen_ext32u_tl(t0
, t0
);
3491 tcg_gen_ext32u_tl(t1
, t1
);
3492 tcg_gen_extu_tl_i64(t2
, t0
);
3493 tcg_gen_extu_tl_i64(t3
, t1
);
3494 tcg_gen_mul_i64(t2
, t2
, t3
);
3495 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3496 tcg_gen_add_i64(t2
, t2
, t3
);
3497 tcg_temp_free_i64(t3
);
3498 gen_move_low32(cpu_LO
[acc
], t2
);
3499 gen_move_high32(cpu_HI
[acc
], t2
);
3500 tcg_temp_free_i64(t2
);
3505 TCGv_i64 t2
= tcg_temp_new_i64();
3506 TCGv_i64 t3
= tcg_temp_new_i64();
3508 tcg_gen_ext_tl_i64(t2
, t0
);
3509 tcg_gen_ext_tl_i64(t3
, t1
);
3510 tcg_gen_mul_i64(t2
, t2
, t3
);
3511 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3512 tcg_gen_sub_i64(t2
, t3
, t2
);
3513 tcg_temp_free_i64(t3
);
3514 gen_move_low32(cpu_LO
[acc
], t2
);
3515 gen_move_high32(cpu_HI
[acc
], t2
);
3516 tcg_temp_free_i64(t2
);
3521 TCGv_i64 t2
= tcg_temp_new_i64();
3522 TCGv_i64 t3
= tcg_temp_new_i64();
3524 tcg_gen_ext32u_tl(t0
, t0
);
3525 tcg_gen_ext32u_tl(t1
, t1
);
3526 tcg_gen_extu_tl_i64(t2
, t0
);
3527 tcg_gen_extu_tl_i64(t3
, t1
);
3528 tcg_gen_mul_i64(t2
, t2
, t3
);
3529 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3530 tcg_gen_sub_i64(t2
, t3
, t2
);
3531 tcg_temp_free_i64(t3
);
3532 gen_move_low32(cpu_LO
[acc
], t2
);
3533 gen_move_high32(cpu_HI
[acc
], t2
);
3534 tcg_temp_free_i64(t2
);
3538 MIPS_INVAL("mul/div");
3539 generate_exception_end(ctx
, EXCP_RI
);
3547 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3548 int rd
, int rs
, int rt
)
3550 TCGv t0
= tcg_temp_new();
3551 TCGv t1
= tcg_temp_new();
3553 gen_load_gpr(t0
, rs
);
3554 gen_load_gpr(t1
, rt
);
3557 case OPC_VR54XX_MULS
:
3558 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3560 case OPC_VR54XX_MULSU
:
3561 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3563 case OPC_VR54XX_MACC
:
3564 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3566 case OPC_VR54XX_MACCU
:
3567 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3569 case OPC_VR54XX_MSAC
:
3570 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3572 case OPC_VR54XX_MSACU
:
3573 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3575 case OPC_VR54XX_MULHI
:
3576 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3578 case OPC_VR54XX_MULHIU
:
3579 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3581 case OPC_VR54XX_MULSHI
:
3582 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3584 case OPC_VR54XX_MULSHIU
:
3585 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3587 case OPC_VR54XX_MACCHI
:
3588 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3590 case OPC_VR54XX_MACCHIU
:
3591 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3593 case OPC_VR54XX_MSACHI
:
3594 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3596 case OPC_VR54XX_MSACHIU
:
3597 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3600 MIPS_INVAL("mul vr54xx");
3601 generate_exception_end(ctx
, EXCP_RI
);
3604 gen_store_gpr(t0
, rd
);
3611 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3620 t0
= tcg_temp_new();
3621 gen_load_gpr(t0
, rs
);
3625 gen_helper_clo(cpu_gpr
[rd
], t0
);
3629 gen_helper_clz(cpu_gpr
[rd
], t0
);
3631 #if defined(TARGET_MIPS64)
3634 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3638 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3645 /* Godson integer instructions */
3646 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3647 int rd
, int rs
, int rt
)
3659 case OPC_MULTU_G_2E
:
3660 case OPC_MULTU_G_2F
:
3661 #if defined(TARGET_MIPS64)
3662 case OPC_DMULT_G_2E
:
3663 case OPC_DMULT_G_2F
:
3664 case OPC_DMULTU_G_2E
:
3665 case OPC_DMULTU_G_2F
:
3667 t0
= tcg_temp_new();
3668 t1
= tcg_temp_new();
3671 t0
= tcg_temp_local_new();
3672 t1
= tcg_temp_local_new();
3676 gen_load_gpr(t0
, rs
);
3677 gen_load_gpr(t1
, rt
);
3682 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3683 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3685 case OPC_MULTU_G_2E
:
3686 case OPC_MULTU_G_2F
:
3687 tcg_gen_ext32u_tl(t0
, t0
);
3688 tcg_gen_ext32u_tl(t1
, t1
);
3689 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3690 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3695 TCGLabel
*l1
= gen_new_label();
3696 TCGLabel
*l2
= gen_new_label();
3697 TCGLabel
*l3
= gen_new_label();
3698 tcg_gen_ext32s_tl(t0
, t0
);
3699 tcg_gen_ext32s_tl(t1
, t1
);
3700 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3701 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3704 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3705 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3706 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3709 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3710 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3717 TCGLabel
*l1
= gen_new_label();
3718 TCGLabel
*l2
= gen_new_label();
3719 tcg_gen_ext32u_tl(t0
, t0
);
3720 tcg_gen_ext32u_tl(t1
, t1
);
3721 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3722 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3725 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3726 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3733 TCGLabel
*l1
= gen_new_label();
3734 TCGLabel
*l2
= gen_new_label();
3735 TCGLabel
*l3
= gen_new_label();
3736 tcg_gen_ext32u_tl(t0
, t0
);
3737 tcg_gen_ext32u_tl(t1
, t1
);
3738 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3739 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3740 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3742 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3745 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3746 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3753 TCGLabel
*l1
= gen_new_label();
3754 TCGLabel
*l2
= gen_new_label();
3755 tcg_gen_ext32u_tl(t0
, t0
);
3756 tcg_gen_ext32u_tl(t1
, t1
);
3757 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3758 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3761 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3762 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3766 #if defined(TARGET_MIPS64)
3767 case OPC_DMULT_G_2E
:
3768 case OPC_DMULT_G_2F
:
3769 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3771 case OPC_DMULTU_G_2E
:
3772 case OPC_DMULTU_G_2F
:
3773 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3778 TCGLabel
*l1
= gen_new_label();
3779 TCGLabel
*l2
= gen_new_label();
3780 TCGLabel
*l3
= gen_new_label();
3781 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3782 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3785 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3786 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3787 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3790 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3794 case OPC_DDIVU_G_2E
:
3795 case OPC_DDIVU_G_2F
:
3797 TCGLabel
*l1
= gen_new_label();
3798 TCGLabel
*l2
= gen_new_label();
3799 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3800 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3803 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3810 TCGLabel
*l1
= gen_new_label();
3811 TCGLabel
*l2
= gen_new_label();
3812 TCGLabel
*l3
= gen_new_label();
3813 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3814 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3815 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3817 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3820 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3824 case OPC_DMODU_G_2E
:
3825 case OPC_DMODU_G_2F
:
3827 TCGLabel
*l1
= gen_new_label();
3828 TCGLabel
*l2
= gen_new_label();
3829 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3830 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3833 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3844 /* Loongson multimedia instructions */
3845 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3847 uint32_t opc
, shift_max
;
3850 opc
= MASK_LMI(ctx
->opcode
);
3856 t0
= tcg_temp_local_new_i64();
3857 t1
= tcg_temp_local_new_i64();
3860 t0
= tcg_temp_new_i64();
3861 t1
= tcg_temp_new_i64();
3865 gen_load_fpr64(ctx
, t0
, rs
);
3866 gen_load_fpr64(ctx
, t1
, rt
);
3868 #define LMI_HELPER(UP, LO) \
3869 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3870 #define LMI_HELPER_1(UP, LO) \
3871 case OPC_##UP: gen_helper_##LO(t0, t0); break
3872 #define LMI_DIRECT(UP, LO, OP) \
3873 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3876 LMI_HELPER(PADDSH
, paddsh
);
3877 LMI_HELPER(PADDUSH
, paddush
);
3878 LMI_HELPER(PADDH
, paddh
);
3879 LMI_HELPER(PADDW
, paddw
);
3880 LMI_HELPER(PADDSB
, paddsb
);
3881 LMI_HELPER(PADDUSB
, paddusb
);
3882 LMI_HELPER(PADDB
, paddb
);
3884 LMI_HELPER(PSUBSH
, psubsh
);
3885 LMI_HELPER(PSUBUSH
, psubush
);
3886 LMI_HELPER(PSUBH
, psubh
);
3887 LMI_HELPER(PSUBW
, psubw
);
3888 LMI_HELPER(PSUBSB
, psubsb
);
3889 LMI_HELPER(PSUBUSB
, psubusb
);
3890 LMI_HELPER(PSUBB
, psubb
);
3892 LMI_HELPER(PSHUFH
, pshufh
);
3893 LMI_HELPER(PACKSSWH
, packsswh
);
3894 LMI_HELPER(PACKSSHB
, packsshb
);
3895 LMI_HELPER(PACKUSHB
, packushb
);
3897 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3898 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3899 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3900 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3901 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3902 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3904 LMI_HELPER(PAVGH
, pavgh
);
3905 LMI_HELPER(PAVGB
, pavgb
);
3906 LMI_HELPER(PMAXSH
, pmaxsh
);
3907 LMI_HELPER(PMINSH
, pminsh
);
3908 LMI_HELPER(PMAXUB
, pmaxub
);
3909 LMI_HELPER(PMINUB
, pminub
);
3911 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3912 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3913 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3914 LMI_HELPER(PCMPGTH
, pcmpgth
);
3915 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3916 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3918 LMI_HELPER(PSLLW
, psllw
);
3919 LMI_HELPER(PSLLH
, psllh
);
3920 LMI_HELPER(PSRLW
, psrlw
);
3921 LMI_HELPER(PSRLH
, psrlh
);
3922 LMI_HELPER(PSRAW
, psraw
);
3923 LMI_HELPER(PSRAH
, psrah
);
3925 LMI_HELPER(PMULLH
, pmullh
);
3926 LMI_HELPER(PMULHH
, pmulhh
);
3927 LMI_HELPER(PMULHUH
, pmulhuh
);
3928 LMI_HELPER(PMADDHW
, pmaddhw
);
3930 LMI_HELPER(PASUBUB
, pasubub
);
3931 LMI_HELPER_1(BIADD
, biadd
);
3932 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3934 LMI_DIRECT(PADDD
, paddd
, add
);
3935 LMI_DIRECT(PSUBD
, psubd
, sub
);
3936 LMI_DIRECT(XOR_CP2
, xor, xor);
3937 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3938 LMI_DIRECT(AND_CP2
, and, and);
3939 LMI_DIRECT(PANDN
, pandn
, andc
);
3940 LMI_DIRECT(OR
, or, or);
3943 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3946 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3949 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3952 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3956 tcg_gen_andi_i64(t1
, t1
, 3);
3957 tcg_gen_shli_i64(t1
, t1
, 4);
3958 tcg_gen_shr_i64(t0
, t0
, t1
);
3959 tcg_gen_ext16u_i64(t0
, t0
);
3963 tcg_gen_add_i64(t0
, t0
, t1
);
3964 tcg_gen_ext32s_i64(t0
, t0
);
3967 tcg_gen_sub_i64(t0
, t0
, t1
);
3968 tcg_gen_ext32s_i64(t0
, t0
);
3990 /* Make sure shift count isn't TCG undefined behaviour. */
3991 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
3996 tcg_gen_shl_i64(t0
, t0
, t1
);
4000 /* Since SRA is UndefinedResult without sign-extended inputs,
4001 we can treat SRA and DSRA the same. */
4002 tcg_gen_sar_i64(t0
, t0
, t1
);
4005 /* We want to shift in zeros for SRL; zero-extend first. */
4006 tcg_gen_ext32u_i64(t0
, t0
);
4009 tcg_gen_shr_i64(t0
, t0
, t1
);
4013 if (shift_max
== 32) {
4014 tcg_gen_ext32s_i64(t0
, t0
);
4017 /* Shifts larger than MAX produce zero. */
4018 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4019 tcg_gen_neg_i64(t1
, t1
);
4020 tcg_gen_and_i64(t0
, t0
, t1
);
4026 TCGv_i64 t2
= tcg_temp_new_i64();
4027 TCGLabel
*lab
= gen_new_label();
4029 tcg_gen_mov_i64(t2
, t0
);
4030 tcg_gen_add_i64(t0
, t1
, t2
);
4031 if (opc
== OPC_ADD_CP2
) {
4032 tcg_gen_ext32s_i64(t0
, t0
);
4034 tcg_gen_xor_i64(t1
, t1
, t2
);
4035 tcg_gen_xor_i64(t2
, t2
, t0
);
4036 tcg_gen_andc_i64(t1
, t2
, t1
);
4037 tcg_temp_free_i64(t2
);
4038 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4039 generate_exception(ctx
, EXCP_OVERFLOW
);
4047 TCGv_i64 t2
= tcg_temp_new_i64();
4048 TCGLabel
*lab
= gen_new_label();
4050 tcg_gen_mov_i64(t2
, t0
);
4051 tcg_gen_sub_i64(t0
, t1
, t2
);
4052 if (opc
== OPC_SUB_CP2
) {
4053 tcg_gen_ext32s_i64(t0
, t0
);
4055 tcg_gen_xor_i64(t1
, t1
, t2
);
4056 tcg_gen_xor_i64(t2
, t2
, t0
);
4057 tcg_gen_and_i64(t1
, t1
, t2
);
4058 tcg_temp_free_i64(t2
);
4059 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4060 generate_exception(ctx
, EXCP_OVERFLOW
);
4066 tcg_gen_ext32u_i64(t0
, t0
);
4067 tcg_gen_ext32u_i64(t1
, t1
);
4068 tcg_gen_mul_i64(t0
, t0
, t1
);
4077 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4078 FD field is the CC field? */
4080 MIPS_INVAL("loongson_cp2");
4081 generate_exception_end(ctx
, EXCP_RI
);
4088 gen_store_fpr64(ctx
, t0
, rd
);
4090 tcg_temp_free_i64(t0
);
4091 tcg_temp_free_i64(t1
);
4095 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4096 int rs
, int rt
, int16_t imm
)
4099 TCGv t0
= tcg_temp_new();
4100 TCGv t1
= tcg_temp_new();
4103 /* Load needed operands */
4111 /* Compare two registers */
4113 gen_load_gpr(t0
, rs
);
4114 gen_load_gpr(t1
, rt
);
4124 /* Compare register to immediate */
4125 if (rs
!= 0 || imm
!= 0) {
4126 gen_load_gpr(t0
, rs
);
4127 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4134 case OPC_TEQ
: /* rs == rs */
4135 case OPC_TEQI
: /* r0 == 0 */
4136 case OPC_TGE
: /* rs >= rs */
4137 case OPC_TGEI
: /* r0 >= 0 */
4138 case OPC_TGEU
: /* rs >= rs unsigned */
4139 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4141 generate_exception_end(ctx
, EXCP_TRAP
);
4143 case OPC_TLT
: /* rs < rs */
4144 case OPC_TLTI
: /* r0 < 0 */
4145 case OPC_TLTU
: /* rs < rs unsigned */
4146 case OPC_TLTIU
: /* r0 < 0 unsigned */
4147 case OPC_TNE
: /* rs != rs */
4148 case OPC_TNEI
: /* r0 != 0 */
4149 /* Never trap: treat as NOP. */
4153 TCGLabel
*l1
= gen_new_label();
4158 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4162 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4166 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4170 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4174 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4178 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4181 generate_exception(ctx
, EXCP_TRAP
);
4188 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4190 TranslationBlock
*tb
;
4192 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4193 likely(!ctx
->singlestep_enabled
)) {
4196 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4199 if (ctx
->singlestep_enabled
) {
4200 save_cpu_state(ctx
, 0);
4201 gen_helper_raise_exception_debug(cpu_env
);
4207 /* Branches (before delay slot) */
4208 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4210 int rs
, int rt
, int32_t offset
,
4213 target_ulong btgt
= -1;
4215 int bcond_compute
= 0;
4216 TCGv t0
= tcg_temp_new();
4217 TCGv t1
= tcg_temp_new();
4219 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4220 #ifdef MIPS_DEBUG_DISAS
4221 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4222 TARGET_FMT_lx
"\n", ctx
->pc
);
4224 generate_exception_end(ctx
, EXCP_RI
);
4228 /* Load needed operands */
4234 /* Compare two registers */
4236 gen_load_gpr(t0
, rs
);
4237 gen_load_gpr(t1
, rt
);
4240 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4254 /* Compare to zero */
4256 gen_load_gpr(t0
, rs
);
4259 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4262 #if defined(TARGET_MIPS64)
4264 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4266 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4269 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4274 /* Jump to immediate */
4275 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4279 /* Jump to register */
4280 if (offset
!= 0 && offset
!= 16) {
4281 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4282 others are reserved. */
4283 MIPS_INVAL("jump hint");
4284 generate_exception_end(ctx
, EXCP_RI
);
4287 gen_load_gpr(btarget
, rs
);
4290 MIPS_INVAL("branch/jump");
4291 generate_exception_end(ctx
, EXCP_RI
);
4294 if (bcond_compute
== 0) {
4295 /* No condition to be computed */
4297 case OPC_BEQ
: /* rx == rx */
4298 case OPC_BEQL
: /* rx == rx likely */
4299 case OPC_BGEZ
: /* 0 >= 0 */
4300 case OPC_BGEZL
: /* 0 >= 0 likely */
4301 case OPC_BLEZ
: /* 0 <= 0 */
4302 case OPC_BLEZL
: /* 0 <= 0 likely */
4304 ctx
->hflags
|= MIPS_HFLAG_B
;
4306 case OPC_BGEZAL
: /* 0 >= 0 */
4307 case OPC_BGEZALL
: /* 0 >= 0 likely */
4308 /* Always take and link */
4310 ctx
->hflags
|= MIPS_HFLAG_B
;
4312 case OPC_BNE
: /* rx != rx */
4313 case OPC_BGTZ
: /* 0 > 0 */
4314 case OPC_BLTZ
: /* 0 < 0 */
4317 case OPC_BLTZAL
: /* 0 < 0 */
4318 /* Handle as an unconditional branch to get correct delay
4321 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4322 ctx
->hflags
|= MIPS_HFLAG_B
;
4324 case OPC_BLTZALL
: /* 0 < 0 likely */
4325 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4326 /* Skip the instruction in the delay slot */
4329 case OPC_BNEL
: /* rx != rx likely */
4330 case OPC_BGTZL
: /* 0 > 0 likely */
4331 case OPC_BLTZL
: /* 0 < 0 likely */
4332 /* Skip the instruction in the delay slot */
4336 ctx
->hflags
|= MIPS_HFLAG_B
;
4339 ctx
->hflags
|= MIPS_HFLAG_BX
;
4343 ctx
->hflags
|= MIPS_HFLAG_B
;
4346 ctx
->hflags
|= MIPS_HFLAG_BR
;
4350 ctx
->hflags
|= MIPS_HFLAG_BR
;
4353 MIPS_INVAL("branch/jump");
4354 generate_exception_end(ctx
, EXCP_RI
);
4360 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4363 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4366 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4369 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4372 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4375 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4378 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4382 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4386 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4389 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4392 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4395 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4398 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4401 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4404 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4406 #if defined(TARGET_MIPS64)
4408 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4412 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4415 ctx
->hflags
|= MIPS_HFLAG_BC
;
4418 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4421 ctx
->hflags
|= MIPS_HFLAG_BL
;
4424 MIPS_INVAL("conditional branch/jump");
4425 generate_exception_end(ctx
, EXCP_RI
);
4430 ctx
->btarget
= btgt
;
4432 switch (delayslot_size
) {
4434 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4437 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4442 int post_delay
= insn_bytes
+ delayslot_size
;
4443 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4445 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4449 if (insn_bytes
== 2)
4450 ctx
->hflags
|= MIPS_HFLAG_B16
;
4455 /* special3 bitfield operations */
4456 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4457 int rs
, int lsb
, int msb
)
4459 TCGv t0
= tcg_temp_new();
4460 TCGv t1
= tcg_temp_new();
4462 gen_load_gpr(t1
, rs
);
4465 if (lsb
+ msb
> 31) {
4468 tcg_gen_shri_tl(t0
, t1
, lsb
);
4470 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4472 tcg_gen_ext32s_tl(t0
, t0
);
4475 #if defined(TARGET_MIPS64)
4484 if (lsb
+ msb
> 63) {
4487 tcg_gen_shri_tl(t0
, t1
, lsb
);
4489 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4497 gen_load_gpr(t0
, rt
);
4498 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4499 tcg_gen_ext32s_tl(t0
, t0
);
4501 #if defined(TARGET_MIPS64)
4512 gen_load_gpr(t0
, rt
);
4513 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4518 MIPS_INVAL("bitops");
4519 generate_exception_end(ctx
, EXCP_RI
);
4524 gen_store_gpr(t0
, rt
);
4529 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4534 /* If no destination, treat it as a NOP. */
4538 t0
= tcg_temp_new();
4539 gen_load_gpr(t0
, rt
);
4543 TCGv t1
= tcg_temp_new();
4545 tcg_gen_shri_tl(t1
, t0
, 8);
4546 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4547 tcg_gen_shli_tl(t0
, t0
, 8);
4548 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4549 tcg_gen_or_tl(t0
, t0
, t1
);
4551 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4555 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4558 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4560 #if defined(TARGET_MIPS64)
4563 TCGv t1
= tcg_temp_new();
4565 tcg_gen_shri_tl(t1
, t0
, 8);
4566 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4567 tcg_gen_shli_tl(t0
, t0
, 8);
4568 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4569 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4575 TCGv t1
= tcg_temp_new();
4577 tcg_gen_shri_tl(t1
, t0
, 16);
4578 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4579 tcg_gen_shli_tl(t0
, t0
, 16);
4580 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4581 tcg_gen_or_tl(t0
, t0
, t1
);
4582 tcg_gen_shri_tl(t1
, t0
, 32);
4583 tcg_gen_shli_tl(t0
, t0
, 32);
4584 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4590 MIPS_INVAL("bsfhl");
4591 generate_exception_end(ctx
, EXCP_RI
);
4598 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4607 t0
= tcg_temp_new();
4608 t1
= tcg_temp_new();
4609 gen_load_gpr(t0
, rs
);
4610 gen_load_gpr(t1
, rt
);
4611 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4612 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4613 if (opc
== OPC_LSA
) {
4614 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4623 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4631 t0
= tcg_temp_new();
4632 gen_load_gpr(t0
, rt
);
4636 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4638 #if defined(TARGET_MIPS64)
4640 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4645 TCGv t1
= tcg_temp_new();
4646 gen_load_gpr(t1
, rs
);
4650 TCGv_i64 t2
= tcg_temp_new_i64();
4651 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4652 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4653 gen_move_low32(cpu_gpr
[rd
], t2
);
4654 tcg_temp_free_i64(t2
);
4657 #if defined(TARGET_MIPS64)
4659 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4660 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4661 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4671 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4678 t0
= tcg_temp_new();
4679 gen_load_gpr(t0
, rt
);
4682 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4684 #if defined(TARGET_MIPS64)
4686 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4693 #ifndef CONFIG_USER_ONLY
4694 /* CP0 (MMU and control) */
4695 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4697 TCGv_i64 t0
= tcg_temp_new_i64();
4698 TCGv_i64 t1
= tcg_temp_new_i64();
4700 tcg_gen_ext_tl_i64(t0
, arg
);
4701 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4702 #if defined(TARGET_MIPS64)
4703 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4705 tcg_gen_concat32_i64(t1
, t1
, t0
);
4707 tcg_gen_st_i64(t1
, cpu_env
, off
);
4708 tcg_temp_free_i64(t1
);
4709 tcg_temp_free_i64(t0
);
4712 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4714 TCGv_i64 t0
= tcg_temp_new_i64();
4715 TCGv_i64 t1
= tcg_temp_new_i64();
4717 tcg_gen_ext_tl_i64(t0
, arg
);
4718 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4719 tcg_gen_concat32_i64(t1
, t1
, t0
);
4720 tcg_gen_st_i64(t1
, cpu_env
, off
);
4721 tcg_temp_free_i64(t1
);
4722 tcg_temp_free_i64(t0
);
4725 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4727 TCGv_i64 t0
= tcg_temp_new_i64();
4729 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4730 #if defined(TARGET_MIPS64)
4731 tcg_gen_shri_i64(t0
, t0
, 30);
4733 tcg_gen_shri_i64(t0
, t0
, 32);
4735 gen_move_low32(arg
, t0
);
4736 tcg_temp_free_i64(t0
);
4739 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4741 TCGv_i64 t0
= tcg_temp_new_i64();
4743 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4744 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4745 gen_move_low32(arg
, t0
);
4746 tcg_temp_free_i64(t0
);
4749 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4751 TCGv_i32 t0
= tcg_temp_new_i32();
4753 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4754 tcg_gen_ext_i32_tl(arg
, t0
);
4755 tcg_temp_free_i32(t0
);
4758 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4760 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4761 tcg_gen_ext32s_tl(arg
, arg
);
4764 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4766 TCGv_i32 t0
= tcg_temp_new_i32();
4768 tcg_gen_trunc_tl_i32(t0
, arg
);
4769 tcg_gen_st_i32(t0
, cpu_env
, off
);
4770 tcg_temp_free_i32(t0
);
4773 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4775 const char *rn
= "invalid";
4777 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4778 goto mfhc0_read_zero
;
4785 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4789 goto mfhc0_read_zero
;
4795 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4799 goto mfhc0_read_zero
;
4805 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4806 ctx
->CP0_LLAddr_shift
);
4810 goto mfhc0_read_zero
;
4819 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4823 goto mfhc0_read_zero
;
4827 goto mfhc0_read_zero
;
4830 (void)rn
; /* avoid a compiler warning */
4831 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4835 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4836 tcg_gen_movi_tl(arg
, 0);
4839 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4841 const char *rn
= "invalid";
4842 uint64_t mask
= ctx
->PAMask
>> 36;
4844 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4852 tcg_gen_andi_tl(arg
, arg
, mask
);
4853 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4863 tcg_gen_andi_tl(arg
, arg
, mask
);
4864 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4874 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4875 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4876 relevant for modern MIPS cores supporting MTHC0, therefore
4877 treating MTHC0 to LLAddr as NOP. */
4890 tcg_gen_andi_tl(arg
, arg
, mask
);
4891 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4902 (void)rn
; /* avoid a compiler warning */
4904 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4907 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4909 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4910 tcg_gen_movi_tl(arg
, 0);
4912 tcg_gen_movi_tl(arg
, ~0);
/* Bail out to the function-local cp0_unimplemented label unless the
 * capability condition `c` holds.  The extraction of this chunk lost
 * the do/while(0) wrapper lines; restored so the macro expands to a
 * single statement safe in unbraced if/else bodies. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4923 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4925 const char *rn
= "invalid";
4928 check_insn(ctx
, ISA_MIPS32
);
4934 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4938 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4939 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4944 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4948 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4949 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4953 goto cp0_unimplemented
;
4959 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4960 gen_helper_mfc0_random(arg
, cpu_env
);
4964 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4969 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4970 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4989 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4990 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4991 rn
= "VPEScheFBack";
4994 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4995 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4999 goto cp0_unimplemented
;
5006 TCGv_i64 tmp
= tcg_temp_new_i64();
5007 tcg_gen_ld_i64(tmp
, cpu_env
,
5008 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5009 #if defined(TARGET_MIPS64)
5011 /* Move RI/XI fields to bits 31:30 */
5012 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5013 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5016 gen_move_low32(arg
, tmp
);
5017 tcg_temp_free_i64(tmp
);
5022 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5023 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5027 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5028 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5033 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5038 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5043 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5048 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5053 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5057 goto cp0_unimplemented
;
5064 TCGv_i64 tmp
= tcg_temp_new_i64();
5065 tcg_gen_ld_i64(tmp
, cpu_env
,
5066 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5067 #if defined(TARGET_MIPS64)
5069 /* Move RI/XI fields to bits 31:30 */
5070 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5071 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5074 gen_move_low32(arg
, tmp
);
5075 tcg_temp_free_i64(tmp
);
5080 goto cp0_unimplemented
;
5086 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5087 tcg_gen_ext32s_tl(arg
, arg
);
5091 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5092 rn
= "ContextConfig";
5093 goto cp0_unimplemented
;
5096 CP0_CHECK(ctx
->ulri
);
5097 tcg_gen_ld32s_tl(arg
, cpu_env
,
5098 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5102 goto cp0_unimplemented
;
5108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5112 check_insn(ctx
, ISA_MIPS32R2
);
5113 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5117 goto cp0_unimplemented
;
5123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5127 check_insn(ctx
, ISA_MIPS32R2
);
5128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5132 check_insn(ctx
, ISA_MIPS32R2
);
5133 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5137 check_insn(ctx
, ISA_MIPS32R2
);
5138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5142 check_insn(ctx
, ISA_MIPS32R2
);
5143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5152 goto cp0_unimplemented
;
5158 check_insn(ctx
, ISA_MIPS32R2
);
5159 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5163 goto cp0_unimplemented
;
5169 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5170 tcg_gen_ext32s_tl(arg
, arg
);
5175 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5180 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5184 goto cp0_unimplemented
;
5190 /* Mark as an IO operation because we read the time. */
5191 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5194 gen_helper_mfc0_count(arg
, cpu_env
);
5195 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5198 /* Break the TB to be able to take timer interrupts immediately
5199 after reading count. */
5200 ctx
->bstate
= BS_STOP
;
5203 /* 6,7 are implementation dependent */
5205 goto cp0_unimplemented
;
5211 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5212 tcg_gen_ext32s_tl(arg
, arg
);
5216 goto cp0_unimplemented
;
5222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5225 /* 6,7 are implementation dependent */
5227 goto cp0_unimplemented
;
5233 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5237 check_insn(ctx
, ISA_MIPS32R2
);
5238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5242 check_insn(ctx
, ISA_MIPS32R2
);
5243 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5247 check_insn(ctx
, ISA_MIPS32R2
);
5248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5252 goto cp0_unimplemented
;
5258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5262 goto cp0_unimplemented
;
5268 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5269 tcg_gen_ext32s_tl(arg
, arg
);
5273 goto cp0_unimplemented
;
5279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5283 check_insn(ctx
, ISA_MIPS32R2
);
5284 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5288 goto cp0_unimplemented
;
5294 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5302 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5306 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5314 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5317 /* 6,7 are implementation dependent */
5319 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5327 goto cp0_unimplemented
;
5333 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5337 goto cp0_unimplemented
;
5343 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5347 goto cp0_unimplemented
;
5353 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5357 goto cp0_unimplemented
;
5363 #if defined(TARGET_MIPS64)
5364 check_insn(ctx
, ISA_MIPS3
);
5365 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5366 tcg_gen_ext32s_tl(arg
, arg
);
5371 goto cp0_unimplemented
;
5375 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5376 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5379 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5383 goto cp0_unimplemented
;
5387 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5388 rn
= "'Diagnostic"; /* implementation dependent */
5393 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5397 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5398 rn
= "TraceControl";
5401 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5402 rn
= "TraceControl2";
5405 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5406 rn
= "UserTraceData";
5409 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5413 goto cp0_unimplemented
;
5420 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5421 tcg_gen_ext32s_tl(arg
, arg
);
5425 goto cp0_unimplemented
;
5431 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5432 rn
= "Performance0";
5435 // gen_helper_mfc0_performance1(arg);
5436 rn
= "Performance1";
5439 // gen_helper_mfc0_performance2(arg);
5440 rn
= "Performance2";
5443 // gen_helper_mfc0_performance3(arg);
5444 rn
= "Performance3";
5447 // gen_helper_mfc0_performance4(arg);
5448 rn
= "Performance4";
5451 // gen_helper_mfc0_performance5(arg);
5452 rn
= "Performance5";
5455 // gen_helper_mfc0_performance6(arg);
5456 rn
= "Performance6";
5459 // gen_helper_mfc0_performance7(arg);
5460 rn
= "Performance7";
5463 goto cp0_unimplemented
;
5467 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5473 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5477 goto cp0_unimplemented
;
5487 TCGv_i64 tmp
= tcg_temp_new_i64();
5488 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5489 gen_move_low32(arg
, tmp
);
5490 tcg_temp_free_i64(tmp
);
5498 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5502 goto cp0_unimplemented
;
5511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5518 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5522 goto cp0_unimplemented
;
5528 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5529 tcg_gen_ext32s_tl(arg
, arg
);
5533 goto cp0_unimplemented
;
5540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5544 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5545 tcg_gen_ld_tl(arg
, cpu_env
,
5546 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5547 tcg_gen_ext32s_tl(arg
, arg
);
5551 goto cp0_unimplemented
;
5555 goto cp0_unimplemented
;
5557 (void)rn
; /* avoid a compiler warning */
5558 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5562 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5563 gen_mfc0_unimplemented(ctx
, arg
);
5566 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5568 const char *rn
= "invalid";
5571 check_insn(ctx
, ISA_MIPS32
);
5573 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5581 gen_helper_mtc0_index(cpu_env
, arg
);
5585 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5586 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5590 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5595 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5600 goto cp0_unimplemented
;
5610 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5611 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5615 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5616 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5620 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5621 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5625 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5626 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5630 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5631 tcg_gen_st_tl(arg
, cpu_env
,
5632 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5636 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5637 tcg_gen_st_tl(arg
, cpu_env
,
5638 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5639 rn
= "VPEScheFBack";
5642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5643 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5647 goto cp0_unimplemented
;
5653 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5658 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5663 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5668 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5672 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5673 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5677 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5678 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5682 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5683 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5687 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5688 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5692 goto cp0_unimplemented
;
5698 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5702 goto cp0_unimplemented
;
5708 gen_helper_mtc0_context(cpu_env
, arg
);
5712 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5713 rn
= "ContextConfig";
5714 goto cp0_unimplemented
;
5717 CP0_CHECK(ctx
->ulri
);
5718 tcg_gen_st_tl(arg
, cpu_env
,
5719 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5723 goto cp0_unimplemented
;
5729 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5733 check_insn(ctx
, ISA_MIPS32R2
);
5734 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5736 ctx
->bstate
= BS_STOP
;
5739 goto cp0_unimplemented
;
5745 gen_helper_mtc0_wired(cpu_env
, arg
);
5749 check_insn(ctx
, ISA_MIPS32R2
);
5750 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5754 check_insn(ctx
, ISA_MIPS32R2
);
5755 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5759 check_insn(ctx
, ISA_MIPS32R2
);
5760 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5764 check_insn(ctx
, ISA_MIPS32R2
);
5765 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5769 check_insn(ctx
, ISA_MIPS32R2
);
5770 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5774 goto cp0_unimplemented
;
5780 check_insn(ctx
, ISA_MIPS32R2
);
5781 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5782 ctx
->bstate
= BS_STOP
;
5786 goto cp0_unimplemented
;
5804 goto cp0_unimplemented
;
5810 gen_helper_mtc0_count(cpu_env
, arg
);
5813 /* 6,7 are implementation dependent */
5815 goto cp0_unimplemented
;
5821 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5825 goto cp0_unimplemented
;
5831 gen_helper_mtc0_compare(cpu_env
, arg
);
5834 /* 6,7 are implementation dependent */
5836 goto cp0_unimplemented
;
5842 save_cpu_state(ctx
, 1);
5843 gen_helper_mtc0_status(cpu_env
, arg
);
5844 /* BS_STOP isn't good enough here, hflags may have changed. */
5845 gen_save_pc(ctx
->pc
+ 4);
5846 ctx
->bstate
= BS_EXCP
;
5850 check_insn(ctx
, ISA_MIPS32R2
);
5851 gen_helper_mtc0_intctl(cpu_env
, arg
);
5852 /* Stop translation as we may have switched the execution mode */
5853 ctx
->bstate
= BS_STOP
;
5857 check_insn(ctx
, ISA_MIPS32R2
);
5858 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5859 /* Stop translation as we may have switched the execution mode */
5860 ctx
->bstate
= BS_STOP
;
5864 check_insn(ctx
, ISA_MIPS32R2
);
5865 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5866 /* Stop translation as we may have switched the execution mode */
5867 ctx
->bstate
= BS_STOP
;
5871 goto cp0_unimplemented
;
5877 save_cpu_state(ctx
, 1);
5878 gen_helper_mtc0_cause(cpu_env
, arg
);
5882 goto cp0_unimplemented
;
5888 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5892 goto cp0_unimplemented
;
5902 check_insn(ctx
, ISA_MIPS32R2
);
5903 gen_helper_mtc0_ebase(cpu_env
, arg
);
5907 goto cp0_unimplemented
;
5913 gen_helper_mtc0_config0(cpu_env
, arg
);
5915 /* Stop translation as we may have switched the execution mode */
5916 ctx
->bstate
= BS_STOP
;
5919 /* ignored, read only */
5923 gen_helper_mtc0_config2(cpu_env
, arg
);
5925 /* Stop translation as we may have switched the execution mode */
5926 ctx
->bstate
= BS_STOP
;
5929 gen_helper_mtc0_config3(cpu_env
, arg
);
5931 /* Stop translation as we may have switched the execution mode */
5932 ctx
->bstate
= BS_STOP
;
5935 gen_helper_mtc0_config4(cpu_env
, arg
);
5937 ctx
->bstate
= BS_STOP
;
5940 gen_helper_mtc0_config5(cpu_env
, arg
);
5942 /* Stop translation as we may have switched the execution mode */
5943 ctx
->bstate
= BS_STOP
;
5945 /* 6,7 are implementation dependent */
5955 rn
= "Invalid config selector";
5956 goto cp0_unimplemented
;
5962 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5966 goto cp0_unimplemented
;
5972 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5976 goto cp0_unimplemented
;
5982 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5986 goto cp0_unimplemented
;
5992 #if defined(TARGET_MIPS64)
5993 check_insn(ctx
, ISA_MIPS3
);
5994 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5999 goto cp0_unimplemented
;
6003 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6004 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6007 gen_helper_mtc0_framemask(cpu_env
, arg
);
6011 goto cp0_unimplemented
;
6016 rn
= "Diagnostic"; /* implementation dependent */
6021 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6022 /* BS_STOP isn't good enough here, hflags may have changed. */
6023 gen_save_pc(ctx
->pc
+ 4);
6024 ctx
->bstate
= BS_EXCP
;
6028 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6029 rn
= "TraceControl";
6030 /* Stop translation as we may have switched the execution mode */
6031 ctx
->bstate
= BS_STOP
;
6034 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6035 rn
= "TraceControl2";
6036 /* Stop translation as we may have switched the execution mode */
6037 ctx
->bstate
= BS_STOP
;
6040 /* Stop translation as we may have switched the execution mode */
6041 ctx
->bstate
= BS_STOP
;
6042 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6043 rn
= "UserTraceData";
6044 /* Stop translation as we may have switched the execution mode */
6045 ctx
->bstate
= BS_STOP
;
6048 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6049 /* Stop translation as we may have switched the execution mode */
6050 ctx
->bstate
= BS_STOP
;
6054 goto cp0_unimplemented
;
6061 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6065 goto cp0_unimplemented
;
6071 gen_helper_mtc0_performance0(cpu_env
, arg
);
6072 rn
= "Performance0";
6075 // gen_helper_mtc0_performance1(arg);
6076 rn
= "Performance1";
6079 // gen_helper_mtc0_performance2(arg);
6080 rn
= "Performance2";
6083 // gen_helper_mtc0_performance3(arg);
6084 rn
= "Performance3";
6087 // gen_helper_mtc0_performance4(arg);
6088 rn
= "Performance4";
6091 // gen_helper_mtc0_performance5(arg);
6092 rn
= "Performance5";
6095 // gen_helper_mtc0_performance6(arg);
6096 rn
= "Performance6";
6099 // gen_helper_mtc0_performance7(arg);
6100 rn
= "Performance7";
6103 goto cp0_unimplemented
;
6117 goto cp0_unimplemented
;
6126 gen_helper_mtc0_taglo(cpu_env
, arg
);
6133 gen_helper_mtc0_datalo(cpu_env
, arg
);
6137 goto cp0_unimplemented
;
6146 gen_helper_mtc0_taghi(cpu_env
, arg
);
6153 gen_helper_mtc0_datahi(cpu_env
, arg
);
6158 goto cp0_unimplemented
;
6164 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6168 goto cp0_unimplemented
;
6175 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6179 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6180 tcg_gen_st_tl(arg
, cpu_env
,
6181 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6185 goto cp0_unimplemented
;
6187 /* Stop translation as we may have switched the execution mode */
6188 ctx
->bstate
= BS_STOP
;
6191 goto cp0_unimplemented
;
6193 (void)rn
; /* avoid a compiler warning */
6194 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6195 /* For simplicity assume that all writes can cause interrupts. */
6196 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6198 ctx
->bstate
= BS_STOP
;
6203 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6206 #if defined(TARGET_MIPS64)
6207 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6209 const char *rn
= "invalid";
6212 check_insn(ctx
, ISA_MIPS64
);
6218 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6223 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6227 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6228 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6232 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6233 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6237 goto cp0_unimplemented
;
6243 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6244 gen_helper_mfc0_random(arg
, cpu_env
);
6248 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6258 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6263 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6264 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6268 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6269 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6273 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6274 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6275 rn
= "VPEScheFBack";
6278 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6283 goto cp0_unimplemented
;
6289 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6293 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6294 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6298 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6299 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6303 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6304 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6308 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6309 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6313 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6314 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6318 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6319 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6323 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6324 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6328 goto cp0_unimplemented
;
6334 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6338 goto cp0_unimplemented
;
6344 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6348 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6349 rn
= "ContextConfig";
6350 goto cp0_unimplemented
;
6353 CP0_CHECK(ctx
->ulri
);
6354 tcg_gen_ld_tl(arg
, cpu_env
,
6355 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6359 goto cp0_unimplemented
;
6365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6369 check_insn(ctx
, ISA_MIPS32R2
);
6370 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6374 goto cp0_unimplemented
;
6380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6384 check_insn(ctx
, ISA_MIPS32R2
);
6385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6389 check_insn(ctx
, ISA_MIPS32R2
);
6390 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6394 check_insn(ctx
, ISA_MIPS32R2
);
6395 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6399 check_insn(ctx
, ISA_MIPS32R2
);
6400 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6404 check_insn(ctx
, ISA_MIPS32R2
);
6405 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6409 goto cp0_unimplemented
;
6415 check_insn(ctx
, ISA_MIPS32R2
);
6416 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6420 goto cp0_unimplemented
;
6426 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6431 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6436 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6440 goto cp0_unimplemented
;
6446 /* Mark as an IO operation because we read the time. */
6447 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6450 gen_helper_mfc0_count(arg
, cpu_env
);
6451 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6454 /* Break the TB to be able to take timer interrupts immediately
6455 after reading count. */
6456 ctx
->bstate
= BS_STOP
;
6459 /* 6,7 are implementation dependent */
6461 goto cp0_unimplemented
;
6467 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6471 goto cp0_unimplemented
;
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6480 /* 6,7 are implementation dependent */
6482 goto cp0_unimplemented
;
6488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6492 check_insn(ctx
, ISA_MIPS32R2
);
6493 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6497 check_insn(ctx
, ISA_MIPS32R2
);
6498 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6502 check_insn(ctx
, ISA_MIPS32R2
);
6503 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6507 goto cp0_unimplemented
;
6513 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6517 goto cp0_unimplemented
;
6523 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6527 goto cp0_unimplemented
;
6533 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6537 check_insn(ctx
, ISA_MIPS32R2
);
6538 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6542 goto cp0_unimplemented
;
6548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6556 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6560 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6564 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6568 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6571 /* 6,7 are implementation dependent */
6573 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6577 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6581 goto cp0_unimplemented
;
6587 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6591 goto cp0_unimplemented
;
6597 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6601 goto cp0_unimplemented
;
6607 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6611 goto cp0_unimplemented
;
6617 check_insn(ctx
, ISA_MIPS3
);
6618 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6622 goto cp0_unimplemented
;
6626 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6627 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6630 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6634 goto cp0_unimplemented
;
6638 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6639 rn
= "'Diagnostic"; /* implementation dependent */
6644 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6648 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6649 rn
= "TraceControl";
6652 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6653 rn
= "TraceControl2";
6656 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6657 rn
= "UserTraceData";
6660 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6664 goto cp0_unimplemented
;
6671 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6675 goto cp0_unimplemented
;
6681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6682 rn
= "Performance0";
6685 // gen_helper_dmfc0_performance1(arg);
6686 rn
= "Performance1";
6689 // gen_helper_dmfc0_performance2(arg);
6690 rn
= "Performance2";
6693 // gen_helper_dmfc0_performance3(arg);
6694 rn
= "Performance3";
6697 // gen_helper_dmfc0_performance4(arg);
6698 rn
= "Performance4";
6701 // gen_helper_dmfc0_performance5(arg);
6702 rn
= "Performance5";
6705 // gen_helper_dmfc0_performance6(arg);
6706 rn
= "Performance6";
6709 // gen_helper_dmfc0_performance7(arg);
6710 rn
= "Performance7";
6713 goto cp0_unimplemented
;
6717 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6724 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6728 goto cp0_unimplemented
;
6737 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6744 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6748 goto cp0_unimplemented
;
6757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6764 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6768 goto cp0_unimplemented
;
6774 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6778 goto cp0_unimplemented
;
6785 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6789 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6790 tcg_gen_ld_tl(arg
, cpu_env
,
6791 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6795 goto cp0_unimplemented
;
6799 goto cp0_unimplemented
;
6801 (void)rn
; /* avoid a compiler warning */
6802 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6806 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6807 gen_mfc0_unimplemented(ctx
, arg
);
6810 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6812 const char *rn
= "invalid";
6815 check_insn(ctx
, ISA_MIPS64
);
6817 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6825 gen_helper_mtc0_index(cpu_env
, arg
);
6829 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6830 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6834 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6839 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6844 goto cp0_unimplemented
;
6854 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6855 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6859 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6860 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6864 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6865 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6869 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6870 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6874 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6875 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6879 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6880 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6881 rn
= "VPEScheFBack";
6884 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6885 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6889 goto cp0_unimplemented
;
6895 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6899 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6900 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6904 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6905 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6909 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6910 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6914 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6915 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6919 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6920 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6924 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6925 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6929 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6930 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6934 goto cp0_unimplemented
;
6940 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6944 goto cp0_unimplemented
;
6950 gen_helper_mtc0_context(cpu_env
, arg
);
6954 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6955 rn
= "ContextConfig";
6956 goto cp0_unimplemented
;
6959 CP0_CHECK(ctx
->ulri
);
6960 tcg_gen_st_tl(arg
, cpu_env
,
6961 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6965 goto cp0_unimplemented
;
6971 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6975 check_insn(ctx
, ISA_MIPS32R2
);
6976 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6980 goto cp0_unimplemented
;
6986 gen_helper_mtc0_wired(cpu_env
, arg
);
6990 check_insn(ctx
, ISA_MIPS32R2
);
6991 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6995 check_insn(ctx
, ISA_MIPS32R2
);
6996 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7000 check_insn(ctx
, ISA_MIPS32R2
);
7001 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7005 check_insn(ctx
, ISA_MIPS32R2
);
7006 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7010 check_insn(ctx
, ISA_MIPS32R2
);
7011 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7015 goto cp0_unimplemented
;
7021 check_insn(ctx
, ISA_MIPS32R2
);
7022 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7023 ctx
->bstate
= BS_STOP
;
7027 goto cp0_unimplemented
;
7045 goto cp0_unimplemented
;
7051 gen_helper_mtc0_count(cpu_env
, arg
);
7054 /* 6,7 are implementation dependent */
7056 goto cp0_unimplemented
;
7058 /* Stop translation as we may have switched the execution mode */
7059 ctx
->bstate
= BS_STOP
;
7064 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7068 goto cp0_unimplemented
;
7074 gen_helper_mtc0_compare(cpu_env
, arg
);
7077 /* 6,7 are implementation dependent */
7079 goto cp0_unimplemented
;
7081 /* Stop translation as we may have switched the execution mode */
7082 ctx
->bstate
= BS_STOP
;
7087 save_cpu_state(ctx
, 1);
7088 gen_helper_mtc0_status(cpu_env
, arg
);
7089 /* BS_STOP isn't good enough here, hflags may have changed. */
7090 gen_save_pc(ctx
->pc
+ 4);
7091 ctx
->bstate
= BS_EXCP
;
7095 check_insn(ctx
, ISA_MIPS32R2
);
7096 gen_helper_mtc0_intctl(cpu_env
, arg
);
7097 /* Stop translation as we may have switched the execution mode */
7098 ctx
->bstate
= BS_STOP
;
7102 check_insn(ctx
, ISA_MIPS32R2
);
7103 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7104 /* Stop translation as we may have switched the execution mode */
7105 ctx
->bstate
= BS_STOP
;
7109 check_insn(ctx
, ISA_MIPS32R2
);
7110 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7111 /* Stop translation as we may have switched the execution mode */
7112 ctx
->bstate
= BS_STOP
;
7116 goto cp0_unimplemented
;
7122 save_cpu_state(ctx
, 1);
7123 /* Mark as an IO operation because we may trigger a software
7125 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7128 gen_helper_mtc0_cause(cpu_env
, arg
);
7129 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7132 /* Stop translation as we may have triggered an intetrupt */
7133 ctx
->bstate
= BS_STOP
;
7137 goto cp0_unimplemented
;
7143 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7147 goto cp0_unimplemented
;
7157 check_insn(ctx
, ISA_MIPS32R2
);
7158 gen_helper_mtc0_ebase(cpu_env
, arg
);
7162 goto cp0_unimplemented
;
7168 gen_helper_mtc0_config0(cpu_env
, arg
);
7170 /* Stop translation as we may have switched the execution mode */
7171 ctx
->bstate
= BS_STOP
;
7174 /* ignored, read only */
7178 gen_helper_mtc0_config2(cpu_env
, arg
);
7180 /* Stop translation as we may have switched the execution mode */
7181 ctx
->bstate
= BS_STOP
;
7184 gen_helper_mtc0_config3(cpu_env
, arg
);
7186 /* Stop translation as we may have switched the execution mode */
7187 ctx
->bstate
= BS_STOP
;
7190 /* currently ignored */
7194 gen_helper_mtc0_config5(cpu_env
, arg
);
7196 /* Stop translation as we may have switched the execution mode */
7197 ctx
->bstate
= BS_STOP
;
7199 /* 6,7 are implementation dependent */
7201 rn
= "Invalid config selector";
7202 goto cp0_unimplemented
;
7208 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7212 goto cp0_unimplemented
;
7218 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7222 goto cp0_unimplemented
;
7228 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7232 goto cp0_unimplemented
;
7238 check_insn(ctx
, ISA_MIPS3
);
7239 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7243 goto cp0_unimplemented
;
7247 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7248 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7251 gen_helper_mtc0_framemask(cpu_env
, arg
);
7255 goto cp0_unimplemented
;
7260 rn
= "Diagnostic"; /* implementation dependent */
7265 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7266 /* BS_STOP isn't good enough here, hflags may have changed. */
7267 gen_save_pc(ctx
->pc
+ 4);
7268 ctx
->bstate
= BS_EXCP
;
7272 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7273 /* Stop translation as we may have switched the execution mode */
7274 ctx
->bstate
= BS_STOP
;
7275 rn
= "TraceControl";
7278 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7279 /* Stop translation as we may have switched the execution mode */
7280 ctx
->bstate
= BS_STOP
;
7281 rn
= "TraceControl2";
7284 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7285 /* Stop translation as we may have switched the execution mode */
7286 ctx
->bstate
= BS_STOP
;
7287 rn
= "UserTraceData";
7290 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7291 /* Stop translation as we may have switched the execution mode */
7292 ctx
->bstate
= BS_STOP
;
7296 goto cp0_unimplemented
;
7303 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7307 goto cp0_unimplemented
;
7313 gen_helper_mtc0_performance0(cpu_env
, arg
);
7314 rn
= "Performance0";
7317 // gen_helper_mtc0_performance1(cpu_env, arg);
7318 rn
= "Performance1";
7321 // gen_helper_mtc0_performance2(cpu_env, arg);
7322 rn
= "Performance2";
7325 // gen_helper_mtc0_performance3(cpu_env, arg);
7326 rn
= "Performance3";
7329 // gen_helper_mtc0_performance4(cpu_env, arg);
7330 rn
= "Performance4";
7333 // gen_helper_mtc0_performance5(cpu_env, arg);
7334 rn
= "Performance5";
7337 // gen_helper_mtc0_performance6(cpu_env, arg);
7338 rn
= "Performance6";
7341 // gen_helper_mtc0_performance7(cpu_env, arg);
7342 rn
= "Performance7";
7345 goto cp0_unimplemented
;
7359 goto cp0_unimplemented
;
7368 gen_helper_mtc0_taglo(cpu_env
, arg
);
7375 gen_helper_mtc0_datalo(cpu_env
, arg
);
7379 goto cp0_unimplemented
;
7388 gen_helper_mtc0_taghi(cpu_env
, arg
);
7395 gen_helper_mtc0_datahi(cpu_env
, arg
);
7400 goto cp0_unimplemented
;
7406 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7410 goto cp0_unimplemented
;
7417 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7421 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7422 tcg_gen_st_tl(arg
, cpu_env
,
7423 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7427 goto cp0_unimplemented
;
7429 /* Stop translation as we may have switched the execution mode */
7430 ctx
->bstate
= BS_STOP
;
7433 goto cp0_unimplemented
;
7435 (void)rn
; /* avoid a compiler warning */
7436 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7437 /* For simplicity assume that all writes can cause interrupts. */
7438 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7440 ctx
->bstate
= BS_STOP
;
7445 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7447 #endif /* TARGET_MIPS64 */
7449 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7450 int u
, int sel
, int h
)
7452 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7453 TCGv t0
= tcg_temp_local_new();
7455 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7456 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7457 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7458 tcg_gen_movi_tl(t0
, -1);
7459 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7460 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7461 tcg_gen_movi_tl(t0
, -1);
7467 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7470 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7480 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7483 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7486 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7489 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7492 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7495 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7498 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7501 gen_mfc0(ctx
, t0
, rt
, sel
);
7508 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7511 gen_mfc0(ctx
, t0
, rt
, sel
);
7517 gen_helper_mftc0_status(t0
, cpu_env
);
7520 gen_mfc0(ctx
, t0
, rt
, sel
);
7526 gen_helper_mftc0_cause(t0
, cpu_env
);
7536 gen_helper_mftc0_epc(t0
, cpu_env
);
7546 gen_helper_mftc0_ebase(t0
, cpu_env
);
7556 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7566 gen_helper_mftc0_debug(t0
, cpu_env
);
7569 gen_mfc0(ctx
, t0
, rt
, sel
);
7574 gen_mfc0(ctx
, t0
, rt
, sel
);
7576 } else switch (sel
) {
7577 /* GPR registers. */
7579 gen_helper_1e0i(mftgpr
, t0
, rt
);
7581 /* Auxiliary CPU registers */
7585 gen_helper_1e0i(mftlo
, t0
, 0);
7588 gen_helper_1e0i(mfthi
, t0
, 0);
7591 gen_helper_1e0i(mftacx
, t0
, 0);
7594 gen_helper_1e0i(mftlo
, t0
, 1);
7597 gen_helper_1e0i(mfthi
, t0
, 1);
7600 gen_helper_1e0i(mftacx
, t0
, 1);
7603 gen_helper_1e0i(mftlo
, t0
, 2);
7606 gen_helper_1e0i(mfthi
, t0
, 2);
7609 gen_helper_1e0i(mftacx
, t0
, 2);
7612 gen_helper_1e0i(mftlo
, t0
, 3);
7615 gen_helper_1e0i(mfthi
, t0
, 3);
7618 gen_helper_1e0i(mftacx
, t0
, 3);
7621 gen_helper_mftdsp(t0
, cpu_env
);
7627 /* Floating point (COP1). */
7629 /* XXX: For now we support only a single FPU context. */
7631 TCGv_i32 fp0
= tcg_temp_new_i32();
7633 gen_load_fpr32(ctx
, fp0
, rt
);
7634 tcg_gen_ext_i32_tl(t0
, fp0
);
7635 tcg_temp_free_i32(fp0
);
7637 TCGv_i32 fp0
= tcg_temp_new_i32();
7639 gen_load_fpr32h(ctx
, fp0
, rt
);
7640 tcg_gen_ext_i32_tl(t0
, fp0
);
7641 tcg_temp_free_i32(fp0
);
7645 /* XXX: For now we support only a single FPU context. */
7646 gen_helper_1e0i(cfc1
, t0
, rt
);
7648 /* COP2: Not implemented. */
7655 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7656 gen_store_gpr(t0
, rd
);
7662 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7663 generate_exception_end(ctx
, EXCP_RI
);
7666 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7667 int u
, int sel
, int h
)
7669 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7670 TCGv t0
= tcg_temp_local_new();
7672 gen_load_gpr(t0
, rt
);
7673 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7674 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7675 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7677 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7678 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7685 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7688 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7698 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7701 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7704 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7707 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7710 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7713 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7716 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7719 gen_mtc0(ctx
, t0
, rd
, sel
);
7726 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7729 gen_mtc0(ctx
, t0
, rd
, sel
);
7735 gen_helper_mttc0_status(cpu_env
, t0
);
7738 gen_mtc0(ctx
, t0
, rd
, sel
);
7744 gen_helper_mttc0_cause(cpu_env
, t0
);
7754 gen_helper_mttc0_ebase(cpu_env
, t0
);
7764 gen_helper_mttc0_debug(cpu_env
, t0
);
7767 gen_mtc0(ctx
, t0
, rd
, sel
);
7772 gen_mtc0(ctx
, t0
, rd
, sel
);
7774 } else switch (sel
) {
7775 /* GPR registers. */
7777 gen_helper_0e1i(mttgpr
, t0
, rd
);
7779 /* Auxiliary CPU registers */
7783 gen_helper_0e1i(mttlo
, t0
, 0);
7786 gen_helper_0e1i(mtthi
, t0
, 0);
7789 gen_helper_0e1i(mttacx
, t0
, 0);
7792 gen_helper_0e1i(mttlo
, t0
, 1);
7795 gen_helper_0e1i(mtthi
, t0
, 1);
7798 gen_helper_0e1i(mttacx
, t0
, 1);
7801 gen_helper_0e1i(mttlo
, t0
, 2);
7804 gen_helper_0e1i(mtthi
, t0
, 2);
7807 gen_helper_0e1i(mttacx
, t0
, 2);
7810 gen_helper_0e1i(mttlo
, t0
, 3);
7813 gen_helper_0e1i(mtthi
, t0
, 3);
7816 gen_helper_0e1i(mttacx
, t0
, 3);
7819 gen_helper_mttdsp(cpu_env
, t0
);
7825 /* Floating point (COP1). */
7827 /* XXX: For now we support only a single FPU context. */
7829 TCGv_i32 fp0
= tcg_temp_new_i32();
7831 tcg_gen_trunc_tl_i32(fp0
, t0
);
7832 gen_store_fpr32(ctx
, fp0
, rd
);
7833 tcg_temp_free_i32(fp0
);
7835 TCGv_i32 fp0
= tcg_temp_new_i32();
7837 tcg_gen_trunc_tl_i32(fp0
, t0
);
7838 gen_store_fpr32h(ctx
, fp0
, rd
);
7839 tcg_temp_free_i32(fp0
);
7843 /* XXX: For now we support only a single FPU context. */
7845 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7847 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7848 tcg_temp_free_i32(fs_tmp
);
7850 /* Stop translation as we may have changed hflags */
7851 ctx
->bstate
= BS_STOP
;
7853 /* COP2: Not implemented. */
7860 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7866 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7867 generate_exception_end(ctx
, EXCP_RI
);
7870 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
7872 const char *opn
= "ldst";
7874 check_cp0_enabled(ctx
);
7881 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7886 TCGv t0
= tcg_temp_new();
7888 gen_load_gpr(t0
, rt
);
7889 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7894 #if defined(TARGET_MIPS64)
7896 check_insn(ctx
, ISA_MIPS3
);
7901 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7905 check_insn(ctx
, ISA_MIPS3
);
7907 TCGv t0
= tcg_temp_new();
7909 gen_load_gpr(t0
, rt
);
7910 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7922 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7928 TCGv t0
= tcg_temp_new();
7929 gen_load_gpr(t0
, rt
);
7930 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7936 check_insn(ctx
, ASE_MT
);
7941 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
7942 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7946 check_insn(ctx
, ASE_MT
);
7947 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
7948 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7953 if (!env
->tlb
->helper_tlbwi
)
7955 gen_helper_tlbwi(cpu_env
);
7960 if (!env
->tlb
->helper_tlbinv
) {
7963 gen_helper_tlbinv(cpu_env
);
7964 } /* treat as nop if TLBINV not supported */
7969 if (!env
->tlb
->helper_tlbinvf
) {
7972 gen_helper_tlbinvf(cpu_env
);
7973 } /* treat as nop if TLBINV not supported */
7977 if (!env
->tlb
->helper_tlbwr
)
7979 gen_helper_tlbwr(cpu_env
);
7983 if (!env
->tlb
->helper_tlbp
)
7985 gen_helper_tlbp(cpu_env
);
7989 if (!env
->tlb
->helper_tlbr
)
7991 gen_helper_tlbr(cpu_env
);
7993 case OPC_ERET
: /* OPC_ERETNC */
7994 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
7995 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
7998 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
7999 if (ctx
->opcode
& (1 << bit_shift
)) {
8002 check_insn(ctx
, ISA_MIPS32R5
);
8003 gen_helper_eretnc(cpu_env
);
8007 check_insn(ctx
, ISA_MIPS2
);
8008 gen_helper_eret(cpu_env
);
8010 ctx
->bstate
= BS_EXCP
;
8015 check_insn(ctx
, ISA_MIPS32
);
8016 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8017 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8020 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8022 generate_exception_end(ctx
, EXCP_RI
);
8024 gen_helper_deret(cpu_env
);
8025 ctx
->bstate
= BS_EXCP
;
8030 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8031 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8032 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8035 /* If we get an exception, we want to restart at next instruction */
8037 save_cpu_state(ctx
, 1);
8039 gen_helper_wait(cpu_env
);
8040 ctx
->bstate
= BS_EXCP
;
8045 generate_exception_end(ctx
, EXCP_RI
);
8048 (void)opn
; /* avoid a compiler warning */
8050 #endif /* !CONFIG_USER_ONLY */
8052 /* CP1 Branches (before delay slot) */
8053 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8054 int32_t cc
, int32_t offset
)
8056 target_ulong btarget
;
8057 TCGv_i32 t0
= tcg_temp_new_i32();
8059 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8060 generate_exception_end(ctx
, EXCP_RI
);
8065 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8067 btarget
= ctx
->pc
+ 4 + offset
;
8071 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8072 tcg_gen_not_i32(t0
, t0
);
8073 tcg_gen_andi_i32(t0
, t0
, 1);
8074 tcg_gen_extu_i32_tl(bcond
, t0
);
8077 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8078 tcg_gen_not_i32(t0
, t0
);
8079 tcg_gen_andi_i32(t0
, t0
, 1);
8080 tcg_gen_extu_i32_tl(bcond
, t0
);
8083 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8084 tcg_gen_andi_i32(t0
, t0
, 1);
8085 tcg_gen_extu_i32_tl(bcond
, t0
);
8088 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8089 tcg_gen_andi_i32(t0
, t0
, 1);
8090 tcg_gen_extu_i32_tl(bcond
, t0
);
8092 ctx
->hflags
|= MIPS_HFLAG_BL
;
8096 TCGv_i32 t1
= tcg_temp_new_i32();
8097 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8098 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8099 tcg_gen_nand_i32(t0
, t0
, t1
);
8100 tcg_temp_free_i32(t1
);
8101 tcg_gen_andi_i32(t0
, t0
, 1);
8102 tcg_gen_extu_i32_tl(bcond
, t0
);
8107 TCGv_i32 t1
= tcg_temp_new_i32();
8108 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8109 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8110 tcg_gen_or_i32(t0
, t0
, t1
);
8111 tcg_temp_free_i32(t1
);
8112 tcg_gen_andi_i32(t0
, t0
, 1);
8113 tcg_gen_extu_i32_tl(bcond
, t0
);
8118 TCGv_i32 t1
= tcg_temp_new_i32();
8119 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8120 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8121 tcg_gen_and_i32(t0
, t0
, t1
);
8122 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8123 tcg_gen_and_i32(t0
, t0
, t1
);
8124 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8125 tcg_gen_nand_i32(t0
, t0
, t1
);
8126 tcg_temp_free_i32(t1
);
8127 tcg_gen_andi_i32(t0
, t0
, 1);
8128 tcg_gen_extu_i32_tl(bcond
, t0
);
8133 TCGv_i32 t1
= tcg_temp_new_i32();
8134 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8135 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8136 tcg_gen_or_i32(t0
, t0
, t1
);
8137 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8138 tcg_gen_or_i32(t0
, t0
, t1
);
8139 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8140 tcg_gen_or_i32(t0
, t0
, t1
);
8141 tcg_temp_free_i32(t1
);
8142 tcg_gen_andi_i32(t0
, t0
, 1);
8143 tcg_gen_extu_i32_tl(bcond
, t0
);
8146 ctx
->hflags
|= MIPS_HFLAG_BC
;
8149 MIPS_INVAL("cp1 cond branch");
8150 generate_exception_end(ctx
, EXCP_RI
);
8153 ctx
->btarget
= btarget
;
8154 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8156 tcg_temp_free_i32(t0
);
8159 /* R6 CP1 Branches */
8160 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8161 int32_t ft
, int32_t offset
,
8164 target_ulong btarget
;
8165 TCGv_i64 t0
= tcg_temp_new_i64();
8167 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8168 #ifdef MIPS_DEBUG_DISAS
8169 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8172 generate_exception_end(ctx
, EXCP_RI
);
8176 gen_load_fpr64(ctx
, t0
, ft
);
8177 tcg_gen_andi_i64(t0
, t0
, 1);
8179 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8183 tcg_gen_xori_i64(t0
, t0
, 1);
8184 ctx
->hflags
|= MIPS_HFLAG_BC
;
8187 /* t0 already set */
8188 ctx
->hflags
|= MIPS_HFLAG_BC
;
8191 MIPS_INVAL("cp1 cond branch");
8192 generate_exception_end(ctx
, EXCP_RI
);
8196 tcg_gen_trunc_i64_tl(bcond
, t0
);
8198 ctx
->btarget
= btarget
;
8200 switch (delayslot_size
) {
8202 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8205 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8210 tcg_temp_free_i64(t0
);
8213 /* Coprocessor 1 (FPU) */
8215 #define FOP(func, fmt) (((fmt) << 21) | (func))
8218 OPC_ADD_S
= FOP(0, FMT_S
),
8219 OPC_SUB_S
= FOP(1, FMT_S
),
8220 OPC_MUL_S
= FOP(2, FMT_S
),
8221 OPC_DIV_S
= FOP(3, FMT_S
),
8222 OPC_SQRT_S
= FOP(4, FMT_S
),
8223 OPC_ABS_S
= FOP(5, FMT_S
),
8224 OPC_MOV_S
= FOP(6, FMT_S
),
8225 OPC_NEG_S
= FOP(7, FMT_S
),
8226 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8227 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8228 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8229 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8230 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8231 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8232 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8233 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8234 OPC_SEL_S
= FOP(16, FMT_S
),
8235 OPC_MOVCF_S
= FOP(17, FMT_S
),
8236 OPC_MOVZ_S
= FOP(18, FMT_S
),
8237 OPC_MOVN_S
= FOP(19, FMT_S
),
8238 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8239 OPC_RECIP_S
= FOP(21, FMT_S
),
8240 OPC_RSQRT_S
= FOP(22, FMT_S
),
8241 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8242 OPC_MADDF_S
= FOP(24, FMT_S
),
8243 OPC_MSUBF_S
= FOP(25, FMT_S
),
8244 OPC_RINT_S
= FOP(26, FMT_S
),
8245 OPC_CLASS_S
= FOP(27, FMT_S
),
8246 OPC_MIN_S
= FOP(28, FMT_S
),
8247 OPC_RECIP2_S
= FOP(28, FMT_S
),
8248 OPC_MINA_S
= FOP(29, FMT_S
),
8249 OPC_RECIP1_S
= FOP(29, FMT_S
),
8250 OPC_MAX_S
= FOP(30, FMT_S
),
8251 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8252 OPC_MAXA_S
= FOP(31, FMT_S
),
8253 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8254 OPC_CVT_D_S
= FOP(33, FMT_S
),
8255 OPC_CVT_W_S
= FOP(36, FMT_S
),
8256 OPC_CVT_L_S
= FOP(37, FMT_S
),
8257 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8258 OPC_CMP_F_S
= FOP (48, FMT_S
),
8259 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8260 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8261 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8262 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8263 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8264 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8265 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8266 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8267 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8268 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8269 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8270 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8271 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8272 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8273 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8275 OPC_ADD_D
= FOP(0, FMT_D
),
8276 OPC_SUB_D
= FOP(1, FMT_D
),
8277 OPC_MUL_D
= FOP(2, FMT_D
),
8278 OPC_DIV_D
= FOP(3, FMT_D
),
8279 OPC_SQRT_D
= FOP(4, FMT_D
),
8280 OPC_ABS_D
= FOP(5, FMT_D
),
8281 OPC_MOV_D
= FOP(6, FMT_D
),
8282 OPC_NEG_D
= FOP(7, FMT_D
),
8283 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8284 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8285 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8286 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8287 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8288 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8289 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8290 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8291 OPC_SEL_D
= FOP(16, FMT_D
),
8292 OPC_MOVCF_D
= FOP(17, FMT_D
),
8293 OPC_MOVZ_D
= FOP(18, FMT_D
),
8294 OPC_MOVN_D
= FOP(19, FMT_D
),
8295 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8296 OPC_RECIP_D
= FOP(21, FMT_D
),
8297 OPC_RSQRT_D
= FOP(22, FMT_D
),
8298 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8299 OPC_MADDF_D
= FOP(24, FMT_D
),
8300 OPC_MSUBF_D
= FOP(25, FMT_D
),
8301 OPC_RINT_D
= FOP(26, FMT_D
),
8302 OPC_CLASS_D
= FOP(27, FMT_D
),
8303 OPC_MIN_D
= FOP(28, FMT_D
),
8304 OPC_RECIP2_D
= FOP(28, FMT_D
),
8305 OPC_MINA_D
= FOP(29, FMT_D
),
8306 OPC_RECIP1_D
= FOP(29, FMT_D
),
8307 OPC_MAX_D
= FOP(30, FMT_D
),
8308 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8309 OPC_MAXA_D
= FOP(31, FMT_D
),
8310 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8311 OPC_CVT_S_D
= FOP(32, FMT_D
),
8312 OPC_CVT_W_D
= FOP(36, FMT_D
),
8313 OPC_CVT_L_D
= FOP(37, FMT_D
),
8314 OPC_CMP_F_D
= FOP (48, FMT_D
),
8315 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8316 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8317 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8318 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8319 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8320 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8321 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8322 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8323 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8324 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8325 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8326 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8327 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8328 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8329 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8331 OPC_CVT_S_W
= FOP(32, FMT_W
),
8332 OPC_CVT_D_W
= FOP(33, FMT_W
),
8333 OPC_CVT_S_L
= FOP(32, FMT_L
),
8334 OPC_CVT_D_L
= FOP(33, FMT_L
),
8335 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8337 OPC_ADD_PS
= FOP(0, FMT_PS
),
8338 OPC_SUB_PS
= FOP(1, FMT_PS
),
8339 OPC_MUL_PS
= FOP(2, FMT_PS
),
8340 OPC_DIV_PS
= FOP(3, FMT_PS
),
8341 OPC_ABS_PS
= FOP(5, FMT_PS
),
8342 OPC_MOV_PS
= FOP(6, FMT_PS
),
8343 OPC_NEG_PS
= FOP(7, FMT_PS
),
8344 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8345 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8346 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8347 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8348 OPC_MULR_PS
= FOP(26, FMT_PS
),
8349 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8350 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8351 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8352 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8354 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8355 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8356 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8357 OPC_PLL_PS
= FOP(44, FMT_PS
),
8358 OPC_PLU_PS
= FOP(45, FMT_PS
),
8359 OPC_PUL_PS
= FOP(46, FMT_PS
),
8360 OPC_PUU_PS
= FOP(47, FMT_PS
),
8361 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8362 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8363 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8364 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8365 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8366 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8367 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8368 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8369 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8370 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8371 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8372 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8373 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8374 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8375 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8376 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8380 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8381 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8382 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8383 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8384 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8385 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8386 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8387 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8388 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8389 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8390 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8391 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8392 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8393 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8394 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8395 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8396 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8397 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8398 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8399 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8400 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8401 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8403 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8404 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8405 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8406 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8407 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8408 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8409 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8410 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8411 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8412 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8413 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8414 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8415 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8416 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8417 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8418 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8419 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8420 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8421 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8422 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8423 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8424 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8426 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8428 TCGv t0
= tcg_temp_new();
8433 TCGv_i32 fp0
= tcg_temp_new_i32();
8435 gen_load_fpr32(ctx
, fp0
, fs
);
8436 tcg_gen_ext_i32_tl(t0
, fp0
);
8437 tcg_temp_free_i32(fp0
);
8439 gen_store_gpr(t0
, rt
);
8442 gen_load_gpr(t0
, rt
);
8444 TCGv_i32 fp0
= tcg_temp_new_i32();
8446 tcg_gen_trunc_tl_i32(fp0
, t0
);
8447 gen_store_fpr32(ctx
, fp0
, fs
);
8448 tcg_temp_free_i32(fp0
);
8452 gen_helper_1e0i(cfc1
, t0
, fs
);
8453 gen_store_gpr(t0
, rt
);
8456 gen_load_gpr(t0
, rt
);
8457 save_cpu_state(ctx
, 0);
8459 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8461 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8462 tcg_temp_free_i32(fs_tmp
);
8464 /* Stop translation as we may have changed hflags */
8465 ctx
->bstate
= BS_STOP
;
8467 #if defined(TARGET_MIPS64)
8469 gen_load_fpr64(ctx
, t0
, fs
);
8470 gen_store_gpr(t0
, rt
);
8473 gen_load_gpr(t0
, rt
);
8474 gen_store_fpr64(ctx
, t0
, fs
);
8479 TCGv_i32 fp0
= tcg_temp_new_i32();
8481 gen_load_fpr32h(ctx
, fp0
, fs
);
8482 tcg_gen_ext_i32_tl(t0
, fp0
);
8483 tcg_temp_free_i32(fp0
);
8485 gen_store_gpr(t0
, rt
);
8488 gen_load_gpr(t0
, rt
);
8490 TCGv_i32 fp0
= tcg_temp_new_i32();
8492 tcg_gen_trunc_tl_i32(fp0
, t0
);
8493 gen_store_fpr32h(ctx
, fp0
, fs
);
8494 tcg_temp_free_i32(fp0
);
8498 MIPS_INVAL("cp1 move");
8499 generate_exception_end(ctx
, EXCP_RI
);
8507 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8523 l1
= gen_new_label();
8524 t0
= tcg_temp_new_i32();
8525 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8526 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8527 tcg_temp_free_i32(t0
);
8529 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8531 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8536 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8540 TCGv_i32 t0
= tcg_temp_new_i32();
8541 TCGLabel
*l1
= gen_new_label();
8548 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8549 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8550 gen_load_fpr32(ctx
, t0
, fs
);
8551 gen_store_fpr32(ctx
, t0
, fd
);
8553 tcg_temp_free_i32(t0
);
8556 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8559 TCGv_i32 t0
= tcg_temp_new_i32();
8561 TCGLabel
*l1
= gen_new_label();
8568 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8569 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8570 tcg_temp_free_i32(t0
);
8571 fp0
= tcg_temp_new_i64();
8572 gen_load_fpr64(ctx
, fp0
, fs
);
8573 gen_store_fpr64(ctx
, fp0
, fd
);
8574 tcg_temp_free_i64(fp0
);
8578 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8582 TCGv_i32 t0
= tcg_temp_new_i32();
8583 TCGLabel
*l1
= gen_new_label();
8584 TCGLabel
*l2
= gen_new_label();
8591 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8592 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8593 gen_load_fpr32(ctx
, t0
, fs
);
8594 gen_store_fpr32(ctx
, t0
, fd
);
8597 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8598 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8599 gen_load_fpr32h(ctx
, t0
, fs
);
8600 gen_store_fpr32h(ctx
, t0
, fd
);
8601 tcg_temp_free_i32(t0
);
8605 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8608 TCGv_i32 t1
= tcg_const_i32(0);
8609 TCGv_i32 fp0
= tcg_temp_new_i32();
8610 TCGv_i32 fp1
= tcg_temp_new_i32();
8611 TCGv_i32 fp2
= tcg_temp_new_i32();
8612 gen_load_fpr32(ctx
, fp0
, fd
);
8613 gen_load_fpr32(ctx
, fp1
, ft
);
8614 gen_load_fpr32(ctx
, fp2
, fs
);
8618 tcg_gen_andi_i32(fp0
, fp0
, 1);
8619 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8622 tcg_gen_andi_i32(fp1
, fp1
, 1);
8623 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8626 tcg_gen_andi_i32(fp1
, fp1
, 1);
8627 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8630 MIPS_INVAL("gen_sel_s");
8631 generate_exception_end(ctx
, EXCP_RI
);
8635 gen_store_fpr32(ctx
, fp0
, fd
);
8636 tcg_temp_free_i32(fp2
);
8637 tcg_temp_free_i32(fp1
);
8638 tcg_temp_free_i32(fp0
);
8639 tcg_temp_free_i32(t1
);
8642 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8645 TCGv_i64 t1
= tcg_const_i64(0);
8646 TCGv_i64 fp0
= tcg_temp_new_i64();
8647 TCGv_i64 fp1
= tcg_temp_new_i64();
8648 TCGv_i64 fp2
= tcg_temp_new_i64();
8649 gen_load_fpr64(ctx
, fp0
, fd
);
8650 gen_load_fpr64(ctx
, fp1
, ft
);
8651 gen_load_fpr64(ctx
, fp2
, fs
);
8655 tcg_gen_andi_i64(fp0
, fp0
, 1);
8656 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8659 tcg_gen_andi_i64(fp1
, fp1
, 1);
8660 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8663 tcg_gen_andi_i64(fp1
, fp1
, 1);
8664 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8667 MIPS_INVAL("gen_sel_d");
8668 generate_exception_end(ctx
, EXCP_RI
);
8672 gen_store_fpr64(ctx
, fp0
, fd
);
8673 tcg_temp_free_i64(fp2
);
8674 tcg_temp_free_i64(fp1
);
8675 tcg_temp_free_i64(fp0
);
8676 tcg_temp_free_i64(t1
);
8679 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8680 int ft
, int fs
, int fd
, int cc
)
8682 uint32_t func
= ctx
->opcode
& 0x3f;
8686 TCGv_i32 fp0
= tcg_temp_new_i32();
8687 TCGv_i32 fp1
= tcg_temp_new_i32();
8689 gen_load_fpr32(ctx
, fp0
, fs
);
8690 gen_load_fpr32(ctx
, fp1
, ft
);
8691 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8692 tcg_temp_free_i32(fp1
);
8693 gen_store_fpr32(ctx
, fp0
, fd
);
8694 tcg_temp_free_i32(fp0
);
8699 TCGv_i32 fp0
= tcg_temp_new_i32();
8700 TCGv_i32 fp1
= tcg_temp_new_i32();
8702 gen_load_fpr32(ctx
, fp0
, fs
);
8703 gen_load_fpr32(ctx
, fp1
, ft
);
8704 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8705 tcg_temp_free_i32(fp1
);
8706 gen_store_fpr32(ctx
, fp0
, fd
);
8707 tcg_temp_free_i32(fp0
);
8712 TCGv_i32 fp0
= tcg_temp_new_i32();
8713 TCGv_i32 fp1
= tcg_temp_new_i32();
8715 gen_load_fpr32(ctx
, fp0
, fs
);
8716 gen_load_fpr32(ctx
, fp1
, ft
);
8717 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8718 tcg_temp_free_i32(fp1
);
8719 gen_store_fpr32(ctx
, fp0
, fd
);
8720 tcg_temp_free_i32(fp0
);
8725 TCGv_i32 fp0
= tcg_temp_new_i32();
8726 TCGv_i32 fp1
= tcg_temp_new_i32();
8728 gen_load_fpr32(ctx
, fp0
, fs
);
8729 gen_load_fpr32(ctx
, fp1
, ft
);
8730 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8731 tcg_temp_free_i32(fp1
);
8732 gen_store_fpr32(ctx
, fp0
, fd
);
8733 tcg_temp_free_i32(fp0
);
8738 TCGv_i32 fp0
= tcg_temp_new_i32();
8740 gen_load_fpr32(ctx
, fp0
, fs
);
8741 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8742 gen_store_fpr32(ctx
, fp0
, fd
);
8743 tcg_temp_free_i32(fp0
);
8748 TCGv_i32 fp0
= tcg_temp_new_i32();
8750 gen_load_fpr32(ctx
, fp0
, fs
);
8751 gen_helper_float_abs_s(fp0
, fp0
);
8752 gen_store_fpr32(ctx
, fp0
, fd
);
8753 tcg_temp_free_i32(fp0
);
8758 TCGv_i32 fp0
= tcg_temp_new_i32();
8760 gen_load_fpr32(ctx
, fp0
, fs
);
8761 gen_store_fpr32(ctx
, fp0
, fd
);
8762 tcg_temp_free_i32(fp0
);
8767 TCGv_i32 fp0
= tcg_temp_new_i32();
8769 gen_load_fpr32(ctx
, fp0
, fs
);
8770 gen_helper_float_chs_s(fp0
, fp0
);
8771 gen_store_fpr32(ctx
, fp0
, fd
);
8772 tcg_temp_free_i32(fp0
);
8776 check_cp1_64bitmode(ctx
);
8778 TCGv_i32 fp32
= tcg_temp_new_i32();
8779 TCGv_i64 fp64
= tcg_temp_new_i64();
8781 gen_load_fpr32(ctx
, fp32
, fs
);
8782 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8783 tcg_temp_free_i32(fp32
);
8784 gen_store_fpr64(ctx
, fp64
, fd
);
8785 tcg_temp_free_i64(fp64
);
8789 check_cp1_64bitmode(ctx
);
8791 TCGv_i32 fp32
= tcg_temp_new_i32();
8792 TCGv_i64 fp64
= tcg_temp_new_i64();
8794 gen_load_fpr32(ctx
, fp32
, fs
);
8795 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8796 tcg_temp_free_i32(fp32
);
8797 gen_store_fpr64(ctx
, fp64
, fd
);
8798 tcg_temp_free_i64(fp64
);
8802 check_cp1_64bitmode(ctx
);
8804 TCGv_i32 fp32
= tcg_temp_new_i32();
8805 TCGv_i64 fp64
= tcg_temp_new_i64();
8807 gen_load_fpr32(ctx
, fp32
, fs
);
8808 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8809 tcg_temp_free_i32(fp32
);
8810 gen_store_fpr64(ctx
, fp64
, fd
);
8811 tcg_temp_free_i64(fp64
);
8815 check_cp1_64bitmode(ctx
);
8817 TCGv_i32 fp32
= tcg_temp_new_i32();
8818 TCGv_i64 fp64
= tcg_temp_new_i64();
8820 gen_load_fpr32(ctx
, fp32
, fs
);
8821 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8822 tcg_temp_free_i32(fp32
);
8823 gen_store_fpr64(ctx
, fp64
, fd
);
8824 tcg_temp_free_i64(fp64
);
8829 TCGv_i32 fp0
= tcg_temp_new_i32();
8831 gen_load_fpr32(ctx
, fp0
, fs
);
8832 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8833 gen_store_fpr32(ctx
, fp0
, fd
);
8834 tcg_temp_free_i32(fp0
);
8839 TCGv_i32 fp0
= tcg_temp_new_i32();
8841 gen_load_fpr32(ctx
, fp0
, fs
);
8842 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8843 gen_store_fpr32(ctx
, fp0
, fd
);
8844 tcg_temp_free_i32(fp0
);
8849 TCGv_i32 fp0
= tcg_temp_new_i32();
8851 gen_load_fpr32(ctx
, fp0
, fs
);
8852 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8853 gen_store_fpr32(ctx
, fp0
, fd
);
8854 tcg_temp_free_i32(fp0
);
8859 TCGv_i32 fp0
= tcg_temp_new_i32();
8861 gen_load_fpr32(ctx
, fp0
, fs
);
8862 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8863 gen_store_fpr32(ctx
, fp0
, fd
);
8864 tcg_temp_free_i32(fp0
);
8868 check_insn(ctx
, ISA_MIPS32R6
);
8869 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8872 check_insn(ctx
, ISA_MIPS32R6
);
8873 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8876 check_insn(ctx
, ISA_MIPS32R6
);
8877 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8880 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8881 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8884 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8886 TCGLabel
*l1
= gen_new_label();
8890 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8892 fp0
= tcg_temp_new_i32();
8893 gen_load_fpr32(ctx
, fp0
, fs
);
8894 gen_store_fpr32(ctx
, fp0
, fd
);
8895 tcg_temp_free_i32(fp0
);
8900 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8902 TCGLabel
*l1
= gen_new_label();
8906 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8907 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_store_fpr32(ctx
, fp0
, fd
);
8910 tcg_temp_free_i32(fp0
);
8917 TCGv_i32 fp0
= tcg_temp_new_i32();
8919 gen_load_fpr32(ctx
, fp0
, fs
);
8920 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8921 gen_store_fpr32(ctx
, fp0
, fd
);
8922 tcg_temp_free_i32(fp0
);
8927 TCGv_i32 fp0
= tcg_temp_new_i32();
8929 gen_load_fpr32(ctx
, fp0
, fs
);
8930 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8931 gen_store_fpr32(ctx
, fp0
, fd
);
8932 tcg_temp_free_i32(fp0
);
8936 check_insn(ctx
, ISA_MIPS32R6
);
8938 TCGv_i32 fp0
= tcg_temp_new_i32();
8939 TCGv_i32 fp1
= tcg_temp_new_i32();
8940 TCGv_i32 fp2
= tcg_temp_new_i32();
8941 gen_load_fpr32(ctx
, fp0
, fs
);
8942 gen_load_fpr32(ctx
, fp1
, ft
);
8943 gen_load_fpr32(ctx
, fp2
, fd
);
8944 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8945 gen_store_fpr32(ctx
, fp2
, fd
);
8946 tcg_temp_free_i32(fp2
);
8947 tcg_temp_free_i32(fp1
);
8948 tcg_temp_free_i32(fp0
);
8952 check_insn(ctx
, ISA_MIPS32R6
);
8954 TCGv_i32 fp0
= tcg_temp_new_i32();
8955 TCGv_i32 fp1
= tcg_temp_new_i32();
8956 TCGv_i32 fp2
= tcg_temp_new_i32();
8957 gen_load_fpr32(ctx
, fp0
, fs
);
8958 gen_load_fpr32(ctx
, fp1
, ft
);
8959 gen_load_fpr32(ctx
, fp2
, fd
);
8960 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8961 gen_store_fpr32(ctx
, fp2
, fd
);
8962 tcg_temp_free_i32(fp2
);
8963 tcg_temp_free_i32(fp1
);
8964 tcg_temp_free_i32(fp0
);
8968 check_insn(ctx
, ISA_MIPS32R6
);
8970 TCGv_i32 fp0
= tcg_temp_new_i32();
8971 gen_load_fpr32(ctx
, fp0
, fs
);
8972 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
8973 gen_store_fpr32(ctx
, fp0
, fd
);
8974 tcg_temp_free_i32(fp0
);
8978 check_insn(ctx
, ISA_MIPS32R6
);
8980 TCGv_i32 fp0
= tcg_temp_new_i32();
8981 gen_load_fpr32(ctx
, fp0
, fs
);
8982 gen_helper_float_class_s(fp0
, fp0
);
8983 gen_store_fpr32(ctx
, fp0
, fd
);
8984 tcg_temp_free_i32(fp0
);
8987 case OPC_MIN_S
: /* OPC_RECIP2_S */
8988 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
8990 TCGv_i32 fp0
= tcg_temp_new_i32();
8991 TCGv_i32 fp1
= tcg_temp_new_i32();
8992 TCGv_i32 fp2
= tcg_temp_new_i32();
8993 gen_load_fpr32(ctx
, fp0
, fs
);
8994 gen_load_fpr32(ctx
, fp1
, ft
);
8995 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
8996 gen_store_fpr32(ctx
, fp2
, fd
);
8997 tcg_temp_free_i32(fp2
);
8998 tcg_temp_free_i32(fp1
);
8999 tcg_temp_free_i32(fp0
);
9002 check_cp1_64bitmode(ctx
);
9004 TCGv_i32 fp0
= tcg_temp_new_i32();
9005 TCGv_i32 fp1
= tcg_temp_new_i32();
9007 gen_load_fpr32(ctx
, fp0
, fs
);
9008 gen_load_fpr32(ctx
, fp1
, ft
);
9009 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9010 tcg_temp_free_i32(fp1
);
9011 gen_store_fpr32(ctx
, fp0
, fd
);
9012 tcg_temp_free_i32(fp0
);
9016 case OPC_MINA_S
: /* OPC_RECIP1_S */
9017 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9019 TCGv_i32 fp0
= tcg_temp_new_i32();
9020 TCGv_i32 fp1
= tcg_temp_new_i32();
9021 TCGv_i32 fp2
= tcg_temp_new_i32();
9022 gen_load_fpr32(ctx
, fp0
, fs
);
9023 gen_load_fpr32(ctx
, fp1
, ft
);
9024 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9025 gen_store_fpr32(ctx
, fp2
, fd
);
9026 tcg_temp_free_i32(fp2
);
9027 tcg_temp_free_i32(fp1
);
9028 tcg_temp_free_i32(fp0
);
9031 check_cp1_64bitmode(ctx
);
9033 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 gen_load_fpr32(ctx
, fp0
, fs
);
9036 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9037 gen_store_fpr32(ctx
, fp0
, fd
);
9038 tcg_temp_free_i32(fp0
);
9042 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9043 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9045 TCGv_i32 fp0
= tcg_temp_new_i32();
9046 TCGv_i32 fp1
= tcg_temp_new_i32();
9047 gen_load_fpr32(ctx
, fp0
, fs
);
9048 gen_load_fpr32(ctx
, fp1
, ft
);
9049 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9050 gen_store_fpr32(ctx
, fp1
, fd
);
9051 tcg_temp_free_i32(fp1
);
9052 tcg_temp_free_i32(fp0
);
9055 check_cp1_64bitmode(ctx
);
9057 TCGv_i32 fp0
= tcg_temp_new_i32();
9059 gen_load_fpr32(ctx
, fp0
, fs
);
9060 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9061 gen_store_fpr32(ctx
, fp0
, fd
);
9062 tcg_temp_free_i32(fp0
);
9066 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9067 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9069 TCGv_i32 fp0
= tcg_temp_new_i32();
9070 TCGv_i32 fp1
= tcg_temp_new_i32();
9071 gen_load_fpr32(ctx
, fp0
, fs
);
9072 gen_load_fpr32(ctx
, fp1
, ft
);
9073 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9074 gen_store_fpr32(ctx
, fp1
, fd
);
9075 tcg_temp_free_i32(fp1
);
9076 tcg_temp_free_i32(fp0
);
9079 check_cp1_64bitmode(ctx
);
9081 TCGv_i32 fp0
= tcg_temp_new_i32();
9082 TCGv_i32 fp1
= tcg_temp_new_i32();
9084 gen_load_fpr32(ctx
, fp0
, fs
);
9085 gen_load_fpr32(ctx
, fp1
, ft
);
9086 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9087 tcg_temp_free_i32(fp1
);
9088 gen_store_fpr32(ctx
, fp0
, fd
);
9089 tcg_temp_free_i32(fp0
);
9094 check_cp1_registers(ctx
, fd
);
9096 TCGv_i32 fp32
= tcg_temp_new_i32();
9097 TCGv_i64 fp64
= tcg_temp_new_i64();
9099 gen_load_fpr32(ctx
, fp32
, fs
);
9100 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9101 tcg_temp_free_i32(fp32
);
9102 gen_store_fpr64(ctx
, fp64
, fd
);
9103 tcg_temp_free_i64(fp64
);
9108 TCGv_i32 fp0
= tcg_temp_new_i32();
9110 gen_load_fpr32(ctx
, fp0
, fs
);
9111 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9112 gen_store_fpr32(ctx
, fp0
, fd
);
9113 tcg_temp_free_i32(fp0
);
9117 check_cp1_64bitmode(ctx
);
9119 TCGv_i32 fp32
= tcg_temp_new_i32();
9120 TCGv_i64 fp64
= tcg_temp_new_i64();
9122 gen_load_fpr32(ctx
, fp32
, fs
);
9123 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9124 tcg_temp_free_i32(fp32
);
9125 gen_store_fpr64(ctx
, fp64
, fd
);
9126 tcg_temp_free_i64(fp64
);
9132 TCGv_i64 fp64
= tcg_temp_new_i64();
9133 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9134 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9136 gen_load_fpr32(ctx
, fp32_0
, fs
);
9137 gen_load_fpr32(ctx
, fp32_1
, ft
);
9138 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9139 tcg_temp_free_i32(fp32_1
);
9140 tcg_temp_free_i32(fp32_0
);
9141 gen_store_fpr64(ctx
, fp64
, fd
);
9142 tcg_temp_free_i64(fp64
);
9154 case OPC_CMP_NGLE_S
:
9161 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9162 if (ctx
->opcode
& (1 << 6)) {
9163 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9165 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9169 check_cp1_registers(ctx
, fs
| ft
| fd
);
9171 TCGv_i64 fp0
= tcg_temp_new_i64();
9172 TCGv_i64 fp1
= tcg_temp_new_i64();
9174 gen_load_fpr64(ctx
, fp0
, fs
);
9175 gen_load_fpr64(ctx
, fp1
, ft
);
9176 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9177 tcg_temp_free_i64(fp1
);
9178 gen_store_fpr64(ctx
, fp0
, fd
);
9179 tcg_temp_free_i64(fp0
);
9183 check_cp1_registers(ctx
, fs
| ft
| fd
);
9185 TCGv_i64 fp0
= tcg_temp_new_i64();
9186 TCGv_i64 fp1
= tcg_temp_new_i64();
9188 gen_load_fpr64(ctx
, fp0
, fs
);
9189 gen_load_fpr64(ctx
, fp1
, ft
);
9190 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9191 tcg_temp_free_i64(fp1
);
9192 gen_store_fpr64(ctx
, fp0
, fd
);
9193 tcg_temp_free_i64(fp0
);
9197 check_cp1_registers(ctx
, fs
| ft
| fd
);
9199 TCGv_i64 fp0
= tcg_temp_new_i64();
9200 TCGv_i64 fp1
= tcg_temp_new_i64();
9202 gen_load_fpr64(ctx
, fp0
, fs
);
9203 gen_load_fpr64(ctx
, fp1
, ft
);
9204 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9205 tcg_temp_free_i64(fp1
);
9206 gen_store_fpr64(ctx
, fp0
, fd
);
9207 tcg_temp_free_i64(fp0
);
9211 check_cp1_registers(ctx
, fs
| ft
| fd
);
9213 TCGv_i64 fp0
= tcg_temp_new_i64();
9214 TCGv_i64 fp1
= tcg_temp_new_i64();
9216 gen_load_fpr64(ctx
, fp0
, fs
);
9217 gen_load_fpr64(ctx
, fp1
, ft
);
9218 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9219 tcg_temp_free_i64(fp1
);
9220 gen_store_fpr64(ctx
, fp0
, fd
);
9221 tcg_temp_free_i64(fp0
);
9225 check_cp1_registers(ctx
, fs
| fd
);
9227 TCGv_i64 fp0
= tcg_temp_new_i64();
9229 gen_load_fpr64(ctx
, fp0
, fs
);
9230 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9231 gen_store_fpr64(ctx
, fp0
, fd
);
9232 tcg_temp_free_i64(fp0
);
9236 check_cp1_registers(ctx
, fs
| fd
);
9238 TCGv_i64 fp0
= tcg_temp_new_i64();
9240 gen_load_fpr64(ctx
, fp0
, fs
);
9241 gen_helper_float_abs_d(fp0
, fp0
);
9242 gen_store_fpr64(ctx
, fp0
, fd
);
9243 tcg_temp_free_i64(fp0
);
9247 check_cp1_registers(ctx
, fs
| fd
);
9249 TCGv_i64 fp0
= tcg_temp_new_i64();
9251 gen_load_fpr64(ctx
, fp0
, fs
);
9252 gen_store_fpr64(ctx
, fp0
, fd
);
9253 tcg_temp_free_i64(fp0
);
9257 check_cp1_registers(ctx
, fs
| fd
);
9259 TCGv_i64 fp0
= tcg_temp_new_i64();
9261 gen_load_fpr64(ctx
, fp0
, fs
);
9262 gen_helper_float_chs_d(fp0
, fp0
);
9263 gen_store_fpr64(ctx
, fp0
, fd
);
9264 tcg_temp_free_i64(fp0
);
9268 check_cp1_64bitmode(ctx
);
9270 TCGv_i64 fp0
= tcg_temp_new_i64();
9272 gen_load_fpr64(ctx
, fp0
, fs
);
9273 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9274 gen_store_fpr64(ctx
, fp0
, fd
);
9275 tcg_temp_free_i64(fp0
);
9279 check_cp1_64bitmode(ctx
);
9281 TCGv_i64 fp0
= tcg_temp_new_i64();
9283 gen_load_fpr64(ctx
, fp0
, fs
);
9284 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9285 gen_store_fpr64(ctx
, fp0
, fd
);
9286 tcg_temp_free_i64(fp0
);
9290 check_cp1_64bitmode(ctx
);
9292 TCGv_i64 fp0
= tcg_temp_new_i64();
9294 gen_load_fpr64(ctx
, fp0
, fs
);
9295 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9296 gen_store_fpr64(ctx
, fp0
, fd
);
9297 tcg_temp_free_i64(fp0
);
9301 check_cp1_64bitmode(ctx
);
9303 TCGv_i64 fp0
= tcg_temp_new_i64();
9305 gen_load_fpr64(ctx
, fp0
, fs
);
9306 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9307 gen_store_fpr64(ctx
, fp0
, fd
);
9308 tcg_temp_free_i64(fp0
);
9312 check_cp1_registers(ctx
, fs
);
9314 TCGv_i32 fp32
= tcg_temp_new_i32();
9315 TCGv_i64 fp64
= tcg_temp_new_i64();
9317 gen_load_fpr64(ctx
, fp64
, fs
);
9318 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9319 tcg_temp_free_i64(fp64
);
9320 gen_store_fpr32(ctx
, fp32
, fd
);
9321 tcg_temp_free_i32(fp32
);
9325 check_cp1_registers(ctx
, fs
);
9327 TCGv_i32 fp32
= tcg_temp_new_i32();
9328 TCGv_i64 fp64
= tcg_temp_new_i64();
9330 gen_load_fpr64(ctx
, fp64
, fs
);
9331 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9332 tcg_temp_free_i64(fp64
);
9333 gen_store_fpr32(ctx
, fp32
, fd
);
9334 tcg_temp_free_i32(fp32
);
9338 check_cp1_registers(ctx
, fs
);
9340 TCGv_i32 fp32
= tcg_temp_new_i32();
9341 TCGv_i64 fp64
= tcg_temp_new_i64();
9343 gen_load_fpr64(ctx
, fp64
, fs
);
9344 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9345 tcg_temp_free_i64(fp64
);
9346 gen_store_fpr32(ctx
, fp32
, fd
);
9347 tcg_temp_free_i32(fp32
);
9351 check_cp1_registers(ctx
, fs
);
9353 TCGv_i32 fp32
= tcg_temp_new_i32();
9354 TCGv_i64 fp64
= tcg_temp_new_i64();
9356 gen_load_fpr64(ctx
, fp64
, fs
);
9357 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9358 tcg_temp_free_i64(fp64
);
9359 gen_store_fpr32(ctx
, fp32
, fd
);
9360 tcg_temp_free_i32(fp32
);
9364 check_insn(ctx
, ISA_MIPS32R6
);
9365 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9368 check_insn(ctx
, ISA_MIPS32R6
);
9369 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9372 check_insn(ctx
, ISA_MIPS32R6
);
9373 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9377 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9380 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9382 TCGLabel
*l1
= gen_new_label();
9386 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9388 fp0
= tcg_temp_new_i64();
9389 gen_load_fpr64(ctx
, fp0
, fs
);
9390 gen_store_fpr64(ctx
, fp0
, fd
);
9391 tcg_temp_free_i64(fp0
);
9396 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9398 TCGLabel
*l1
= gen_new_label();
9402 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9403 fp0
= tcg_temp_new_i64();
9404 gen_load_fpr64(ctx
, fp0
, fs
);
9405 gen_store_fpr64(ctx
, fp0
, fd
);
9406 tcg_temp_free_i64(fp0
);
9412 check_cp1_registers(ctx
, fs
| fd
);
9414 TCGv_i64 fp0
= tcg_temp_new_i64();
9416 gen_load_fpr64(ctx
, fp0
, fs
);
9417 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9418 gen_store_fpr64(ctx
, fp0
, fd
);
9419 tcg_temp_free_i64(fp0
);
9423 check_cp1_registers(ctx
, fs
| fd
);
9425 TCGv_i64 fp0
= tcg_temp_new_i64();
9427 gen_load_fpr64(ctx
, fp0
, fs
);
9428 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9429 gen_store_fpr64(ctx
, fp0
, fd
);
9430 tcg_temp_free_i64(fp0
);
9434 check_insn(ctx
, ISA_MIPS32R6
);
9436 TCGv_i64 fp0
= tcg_temp_new_i64();
9437 TCGv_i64 fp1
= tcg_temp_new_i64();
9438 TCGv_i64 fp2
= tcg_temp_new_i64();
9439 gen_load_fpr64(ctx
, fp0
, fs
);
9440 gen_load_fpr64(ctx
, fp1
, ft
);
9441 gen_load_fpr64(ctx
, fp2
, fd
);
9442 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9443 gen_store_fpr64(ctx
, fp2
, fd
);
9444 tcg_temp_free_i64(fp2
);
9445 tcg_temp_free_i64(fp1
);
9446 tcg_temp_free_i64(fp0
);
9450 check_insn(ctx
, ISA_MIPS32R6
);
9452 TCGv_i64 fp0
= tcg_temp_new_i64();
9453 TCGv_i64 fp1
= tcg_temp_new_i64();
9454 TCGv_i64 fp2
= tcg_temp_new_i64();
9455 gen_load_fpr64(ctx
, fp0
, fs
);
9456 gen_load_fpr64(ctx
, fp1
, ft
);
9457 gen_load_fpr64(ctx
, fp2
, fd
);
9458 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9459 gen_store_fpr64(ctx
, fp2
, fd
);
9460 tcg_temp_free_i64(fp2
);
9461 tcg_temp_free_i64(fp1
);
9462 tcg_temp_free_i64(fp0
);
9466 check_insn(ctx
, ISA_MIPS32R6
);
9468 TCGv_i64 fp0
= tcg_temp_new_i64();
9469 gen_load_fpr64(ctx
, fp0
, fs
);
9470 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9471 gen_store_fpr64(ctx
, fp0
, fd
);
9472 tcg_temp_free_i64(fp0
);
9476 check_insn(ctx
, ISA_MIPS32R6
);
9478 TCGv_i64 fp0
= tcg_temp_new_i64();
9479 gen_load_fpr64(ctx
, fp0
, fs
);
9480 gen_helper_float_class_d(fp0
, fp0
);
9481 gen_store_fpr64(ctx
, fp0
, fd
);
9482 tcg_temp_free_i64(fp0
);
9485 case OPC_MIN_D
: /* OPC_RECIP2_D */
9486 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9488 TCGv_i64 fp0
= tcg_temp_new_i64();
9489 TCGv_i64 fp1
= tcg_temp_new_i64();
9490 gen_load_fpr64(ctx
, fp0
, fs
);
9491 gen_load_fpr64(ctx
, fp1
, ft
);
9492 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9493 gen_store_fpr64(ctx
, fp1
, fd
);
9494 tcg_temp_free_i64(fp1
);
9495 tcg_temp_free_i64(fp0
);
9498 check_cp1_64bitmode(ctx
);
9500 TCGv_i64 fp0
= tcg_temp_new_i64();
9501 TCGv_i64 fp1
= tcg_temp_new_i64();
9503 gen_load_fpr64(ctx
, fp0
, fs
);
9504 gen_load_fpr64(ctx
, fp1
, ft
);
9505 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9506 tcg_temp_free_i64(fp1
);
9507 gen_store_fpr64(ctx
, fp0
, fd
);
9508 tcg_temp_free_i64(fp0
);
9512 case OPC_MINA_D
: /* OPC_RECIP1_D */
9513 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9515 TCGv_i64 fp0
= tcg_temp_new_i64();
9516 TCGv_i64 fp1
= tcg_temp_new_i64();
9517 gen_load_fpr64(ctx
, fp0
, fs
);
9518 gen_load_fpr64(ctx
, fp1
, ft
);
9519 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9520 gen_store_fpr64(ctx
, fp1
, fd
);
9521 tcg_temp_free_i64(fp1
);
9522 tcg_temp_free_i64(fp0
);
9525 check_cp1_64bitmode(ctx
);
9527 TCGv_i64 fp0
= tcg_temp_new_i64();
9529 gen_load_fpr64(ctx
, fp0
, fs
);
9530 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9531 gen_store_fpr64(ctx
, fp0
, fd
);
9532 tcg_temp_free_i64(fp0
);
9536 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9537 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9539 TCGv_i64 fp0
= tcg_temp_new_i64();
9540 TCGv_i64 fp1
= tcg_temp_new_i64();
9541 gen_load_fpr64(ctx
, fp0
, fs
);
9542 gen_load_fpr64(ctx
, fp1
, ft
);
9543 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9544 gen_store_fpr64(ctx
, fp1
, fd
);
9545 tcg_temp_free_i64(fp1
);
9546 tcg_temp_free_i64(fp0
);
9549 check_cp1_64bitmode(ctx
);
9551 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 gen_load_fpr64(ctx
, fp0
, fs
);
9554 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9555 gen_store_fpr64(ctx
, fp0
, fd
);
9556 tcg_temp_free_i64(fp0
);
9560 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9561 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9563 TCGv_i64 fp0
= tcg_temp_new_i64();
9564 TCGv_i64 fp1
= tcg_temp_new_i64();
9565 gen_load_fpr64(ctx
, fp0
, fs
);
9566 gen_load_fpr64(ctx
, fp1
, ft
);
9567 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9568 gen_store_fpr64(ctx
, fp1
, fd
);
9569 tcg_temp_free_i64(fp1
);
9570 tcg_temp_free_i64(fp0
);
9573 check_cp1_64bitmode(ctx
);
9575 TCGv_i64 fp0
= tcg_temp_new_i64();
9576 TCGv_i64 fp1
= tcg_temp_new_i64();
9578 gen_load_fpr64(ctx
, fp0
, fs
);
9579 gen_load_fpr64(ctx
, fp1
, ft
);
9580 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9581 tcg_temp_free_i64(fp1
);
9582 gen_store_fpr64(ctx
, fp0
, fd
);
9583 tcg_temp_free_i64(fp0
);
9596 case OPC_CMP_NGLE_D
:
9603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9604 if (ctx
->opcode
& (1 << 6)) {
9605 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9607 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9611 check_cp1_registers(ctx
, fs
);
9613 TCGv_i32 fp32
= tcg_temp_new_i32();
9614 TCGv_i64 fp64
= tcg_temp_new_i64();
9616 gen_load_fpr64(ctx
, fp64
, fs
);
9617 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9618 tcg_temp_free_i64(fp64
);
9619 gen_store_fpr32(ctx
, fp32
, fd
);
9620 tcg_temp_free_i32(fp32
);
9624 check_cp1_registers(ctx
, fs
);
9626 TCGv_i32 fp32
= tcg_temp_new_i32();
9627 TCGv_i64 fp64
= tcg_temp_new_i64();
9629 gen_load_fpr64(ctx
, fp64
, fs
);
9630 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9631 tcg_temp_free_i64(fp64
);
9632 gen_store_fpr32(ctx
, fp32
, fd
);
9633 tcg_temp_free_i32(fp32
);
9637 check_cp1_64bitmode(ctx
);
9639 TCGv_i64 fp0
= tcg_temp_new_i64();
9641 gen_load_fpr64(ctx
, fp0
, fs
);
9642 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9643 gen_store_fpr64(ctx
, fp0
, fd
);
9644 tcg_temp_free_i64(fp0
);
9649 TCGv_i32 fp0
= tcg_temp_new_i32();
9651 gen_load_fpr32(ctx
, fp0
, fs
);
9652 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9653 gen_store_fpr32(ctx
, fp0
, fd
);
9654 tcg_temp_free_i32(fp0
);
9658 check_cp1_registers(ctx
, fd
);
9660 TCGv_i32 fp32
= tcg_temp_new_i32();
9661 TCGv_i64 fp64
= tcg_temp_new_i64();
9663 gen_load_fpr32(ctx
, fp32
, fs
);
9664 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9665 tcg_temp_free_i32(fp32
);
9666 gen_store_fpr64(ctx
, fp64
, fd
);
9667 tcg_temp_free_i64(fp64
);
9671 check_cp1_64bitmode(ctx
);
9673 TCGv_i32 fp32
= tcg_temp_new_i32();
9674 TCGv_i64 fp64
= tcg_temp_new_i64();
9676 gen_load_fpr64(ctx
, fp64
, fs
);
9677 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9678 tcg_temp_free_i64(fp64
);
9679 gen_store_fpr32(ctx
, fp32
, fd
);
9680 tcg_temp_free_i32(fp32
);
9684 check_cp1_64bitmode(ctx
);
9686 TCGv_i64 fp0
= tcg_temp_new_i64();
9688 gen_load_fpr64(ctx
, fp0
, fs
);
9689 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9690 gen_store_fpr64(ctx
, fp0
, fd
);
9691 tcg_temp_free_i64(fp0
);
9697 TCGv_i64 fp0
= tcg_temp_new_i64();
9699 gen_load_fpr64(ctx
, fp0
, fs
);
9700 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9701 gen_store_fpr64(ctx
, fp0
, fd
);
9702 tcg_temp_free_i64(fp0
);
9708 TCGv_i64 fp0
= tcg_temp_new_i64();
9709 TCGv_i64 fp1
= tcg_temp_new_i64();
9711 gen_load_fpr64(ctx
, fp0
, fs
);
9712 gen_load_fpr64(ctx
, fp1
, ft
);
9713 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9714 tcg_temp_free_i64(fp1
);
9715 gen_store_fpr64(ctx
, fp0
, fd
);
9716 tcg_temp_free_i64(fp0
);
9722 TCGv_i64 fp0
= tcg_temp_new_i64();
9723 TCGv_i64 fp1
= tcg_temp_new_i64();
9725 gen_load_fpr64(ctx
, fp0
, fs
);
9726 gen_load_fpr64(ctx
, fp1
, ft
);
9727 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9728 tcg_temp_free_i64(fp1
);
9729 gen_store_fpr64(ctx
, fp0
, fd
);
9730 tcg_temp_free_i64(fp0
);
9736 TCGv_i64 fp0
= tcg_temp_new_i64();
9737 TCGv_i64 fp1
= tcg_temp_new_i64();
9739 gen_load_fpr64(ctx
, fp0
, fs
);
9740 gen_load_fpr64(ctx
, fp1
, ft
);
9741 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9742 tcg_temp_free_i64(fp1
);
9743 gen_store_fpr64(ctx
, fp0
, fd
);
9744 tcg_temp_free_i64(fp0
);
9750 TCGv_i64 fp0
= tcg_temp_new_i64();
9752 gen_load_fpr64(ctx
, fp0
, fs
);
9753 gen_helper_float_abs_ps(fp0
, fp0
);
9754 gen_store_fpr64(ctx
, fp0
, fd
);
9755 tcg_temp_free_i64(fp0
);
9761 TCGv_i64 fp0
= tcg_temp_new_i64();
9763 gen_load_fpr64(ctx
, fp0
, fs
);
9764 gen_store_fpr64(ctx
, fp0
, fd
);
9765 tcg_temp_free_i64(fp0
);
9771 TCGv_i64 fp0
= tcg_temp_new_i64();
9773 gen_load_fpr64(ctx
, fp0
, fs
);
9774 gen_helper_float_chs_ps(fp0
, fp0
);
9775 gen_store_fpr64(ctx
, fp0
, fd
);
9776 tcg_temp_free_i64(fp0
);
9781 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9786 TCGLabel
*l1
= gen_new_label();
9790 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9791 fp0
= tcg_temp_new_i64();
9792 gen_load_fpr64(ctx
, fp0
, fs
);
9793 gen_store_fpr64(ctx
, fp0
, fd
);
9794 tcg_temp_free_i64(fp0
);
9801 TCGLabel
*l1
= gen_new_label();
9805 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9806 fp0
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp0
, fs
);
9808 gen_store_fpr64(ctx
, fp0
, fd
);
9809 tcg_temp_free_i64(fp0
);
9817 TCGv_i64 fp0
= tcg_temp_new_i64();
9818 TCGv_i64 fp1
= tcg_temp_new_i64();
9820 gen_load_fpr64(ctx
, fp0
, ft
);
9821 gen_load_fpr64(ctx
, fp1
, fs
);
9822 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9823 tcg_temp_free_i64(fp1
);
9824 gen_store_fpr64(ctx
, fp0
, fd
);
9825 tcg_temp_free_i64(fp0
);
9831 TCGv_i64 fp0
= tcg_temp_new_i64();
9832 TCGv_i64 fp1
= tcg_temp_new_i64();
9834 gen_load_fpr64(ctx
, fp0
, ft
);
9835 gen_load_fpr64(ctx
, fp1
, fs
);
9836 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9837 tcg_temp_free_i64(fp1
);
9838 gen_store_fpr64(ctx
, fp0
, fd
);
9839 tcg_temp_free_i64(fp0
);
9845 TCGv_i64 fp0
= tcg_temp_new_i64();
9846 TCGv_i64 fp1
= tcg_temp_new_i64();
9848 gen_load_fpr64(ctx
, fp0
, fs
);
9849 gen_load_fpr64(ctx
, fp1
, ft
);
9850 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9851 tcg_temp_free_i64(fp1
);
9852 gen_store_fpr64(ctx
, fp0
, fd
);
9853 tcg_temp_free_i64(fp0
);
9859 TCGv_i64 fp0
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp0
, fs
);
9862 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9863 gen_store_fpr64(ctx
, fp0
, fd
);
9864 tcg_temp_free_i64(fp0
);
9870 TCGv_i64 fp0
= tcg_temp_new_i64();
9872 gen_load_fpr64(ctx
, fp0
, fs
);
9873 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9874 gen_store_fpr64(ctx
, fp0
, fd
);
9875 tcg_temp_free_i64(fp0
);
9881 TCGv_i64 fp0
= tcg_temp_new_i64();
9882 TCGv_i64 fp1
= tcg_temp_new_i64();
9884 gen_load_fpr64(ctx
, fp0
, fs
);
9885 gen_load_fpr64(ctx
, fp1
, ft
);
9886 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9887 tcg_temp_free_i64(fp1
);
9888 gen_store_fpr64(ctx
, fp0
, fd
);
9889 tcg_temp_free_i64(fp0
);
9893 check_cp1_64bitmode(ctx
);
9895 TCGv_i32 fp0
= tcg_temp_new_i32();
9897 gen_load_fpr32h(ctx
, fp0
, fs
);
9898 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9899 gen_store_fpr32(ctx
, fp0
, fd
);
9900 tcg_temp_free_i32(fp0
);
9906 TCGv_i64 fp0
= tcg_temp_new_i64();
9908 gen_load_fpr64(ctx
, fp0
, fs
);
9909 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9910 gen_store_fpr64(ctx
, fp0
, fd
);
9911 tcg_temp_free_i64(fp0
);
9915 check_cp1_64bitmode(ctx
);
9917 TCGv_i32 fp0
= tcg_temp_new_i32();
9919 gen_load_fpr32(ctx
, fp0
, fs
);
9920 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9921 gen_store_fpr32(ctx
, fp0
, fd
);
9922 tcg_temp_free_i32(fp0
);
9928 TCGv_i32 fp0
= tcg_temp_new_i32();
9929 TCGv_i32 fp1
= tcg_temp_new_i32();
9931 gen_load_fpr32(ctx
, fp0
, fs
);
9932 gen_load_fpr32(ctx
, fp1
, ft
);
9933 gen_store_fpr32h(ctx
, fp0
, fd
);
9934 gen_store_fpr32(ctx
, fp1
, fd
);
9935 tcg_temp_free_i32(fp0
);
9936 tcg_temp_free_i32(fp1
);
9942 TCGv_i32 fp0
= tcg_temp_new_i32();
9943 TCGv_i32 fp1
= tcg_temp_new_i32();
9945 gen_load_fpr32(ctx
, fp0
, fs
);
9946 gen_load_fpr32h(ctx
, fp1
, ft
);
9947 gen_store_fpr32(ctx
, fp1
, fd
);
9948 gen_store_fpr32h(ctx
, fp0
, fd
);
9949 tcg_temp_free_i32(fp0
);
9950 tcg_temp_free_i32(fp1
);
9956 TCGv_i32 fp0
= tcg_temp_new_i32();
9957 TCGv_i32 fp1
= tcg_temp_new_i32();
9959 gen_load_fpr32h(ctx
, fp0
, fs
);
9960 gen_load_fpr32(ctx
, fp1
, ft
);
9961 gen_store_fpr32(ctx
, fp1
, fd
);
9962 gen_store_fpr32h(ctx
, fp0
, fd
);
9963 tcg_temp_free_i32(fp0
);
9964 tcg_temp_free_i32(fp1
);
9970 TCGv_i32 fp0
= tcg_temp_new_i32();
9971 TCGv_i32 fp1
= tcg_temp_new_i32();
9973 gen_load_fpr32h(ctx
, fp0
, fs
);
9974 gen_load_fpr32h(ctx
, fp1
, ft
);
9975 gen_store_fpr32(ctx
, fp1
, fd
);
9976 gen_store_fpr32h(ctx
, fp0
, fd
);
9977 tcg_temp_free_i32(fp0
);
9978 tcg_temp_free_i32(fp1
);
9984 case OPC_CMP_UEQ_PS
:
9985 case OPC_CMP_OLT_PS
:
9986 case OPC_CMP_ULT_PS
:
9987 case OPC_CMP_OLE_PS
:
9988 case OPC_CMP_ULE_PS
:
9990 case OPC_CMP_NGLE_PS
:
9991 case OPC_CMP_SEQ_PS
:
9992 case OPC_CMP_NGL_PS
:
9994 case OPC_CMP_NGE_PS
:
9996 case OPC_CMP_NGT_PS
:
9997 if (ctx
->opcode
& (1 << 6)) {
9998 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10000 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10004 MIPS_INVAL("farith");
10005 generate_exception_end(ctx
, EXCP_RI
);
10010 /* Coprocessor 3 (FPU) */
10011 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10012 int fd
, int fs
, int base
, int index
)
10014 TCGv t0
= tcg_temp_new();
10017 gen_load_gpr(t0
, index
);
10018 } else if (index
== 0) {
10019 gen_load_gpr(t0
, base
);
10021 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10023 /* Don't do NOP if destination is zero: we must perform the actual
10029 TCGv_i32 fp0
= tcg_temp_new_i32();
10031 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10032 tcg_gen_trunc_tl_i32(fp0
, t0
);
10033 gen_store_fpr32(ctx
, fp0
, fd
);
10034 tcg_temp_free_i32(fp0
);
10039 check_cp1_registers(ctx
, fd
);
10041 TCGv_i64 fp0
= tcg_temp_new_i64();
10042 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10043 gen_store_fpr64(ctx
, fp0
, fd
);
10044 tcg_temp_free_i64(fp0
);
10048 check_cp1_64bitmode(ctx
);
10049 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10051 TCGv_i64 fp0
= tcg_temp_new_i64();
10053 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10054 gen_store_fpr64(ctx
, fp0
, fd
);
10055 tcg_temp_free_i64(fp0
);
10061 TCGv_i32 fp0
= tcg_temp_new_i32();
10062 gen_load_fpr32(ctx
, fp0
, fs
);
10063 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10064 tcg_temp_free_i32(fp0
);
10069 check_cp1_registers(ctx
, fs
);
10071 TCGv_i64 fp0
= tcg_temp_new_i64();
10072 gen_load_fpr64(ctx
, fp0
, fs
);
10073 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10074 tcg_temp_free_i64(fp0
);
10078 check_cp1_64bitmode(ctx
);
10079 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10081 TCGv_i64 fp0
= tcg_temp_new_i64();
10082 gen_load_fpr64(ctx
, fp0
, fs
);
10083 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10084 tcg_temp_free_i64(fp0
);
10091 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10092 int fd
, int fr
, int fs
, int ft
)
10098 TCGv t0
= tcg_temp_local_new();
10099 TCGv_i32 fp
= tcg_temp_new_i32();
10100 TCGv_i32 fph
= tcg_temp_new_i32();
10101 TCGLabel
*l1
= gen_new_label();
10102 TCGLabel
*l2
= gen_new_label();
10104 gen_load_gpr(t0
, fr
);
10105 tcg_gen_andi_tl(t0
, t0
, 0x7);
10107 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10108 gen_load_fpr32(ctx
, fp
, fs
);
10109 gen_load_fpr32h(ctx
, fph
, fs
);
10110 gen_store_fpr32(ctx
, fp
, fd
);
10111 gen_store_fpr32h(ctx
, fph
, fd
);
10114 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10116 #ifdef TARGET_WORDS_BIGENDIAN
10117 gen_load_fpr32(ctx
, fp
, fs
);
10118 gen_load_fpr32h(ctx
, fph
, ft
);
10119 gen_store_fpr32h(ctx
, fp
, fd
);
10120 gen_store_fpr32(ctx
, fph
, fd
);
10122 gen_load_fpr32h(ctx
, fph
, fs
);
10123 gen_load_fpr32(ctx
, fp
, ft
);
10124 gen_store_fpr32(ctx
, fph
, fd
);
10125 gen_store_fpr32h(ctx
, fp
, fd
);
10128 tcg_temp_free_i32(fp
);
10129 tcg_temp_free_i32(fph
);
10135 TCGv_i32 fp0
= tcg_temp_new_i32();
10136 TCGv_i32 fp1
= tcg_temp_new_i32();
10137 TCGv_i32 fp2
= tcg_temp_new_i32();
10139 gen_load_fpr32(ctx
, fp0
, fs
);
10140 gen_load_fpr32(ctx
, fp1
, ft
);
10141 gen_load_fpr32(ctx
, fp2
, fr
);
10142 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10143 tcg_temp_free_i32(fp0
);
10144 tcg_temp_free_i32(fp1
);
10145 gen_store_fpr32(ctx
, fp2
, fd
);
10146 tcg_temp_free_i32(fp2
);
10151 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10153 TCGv_i64 fp0
= tcg_temp_new_i64();
10154 TCGv_i64 fp1
= tcg_temp_new_i64();
10155 TCGv_i64 fp2
= tcg_temp_new_i64();
10157 gen_load_fpr64(ctx
, fp0
, fs
);
10158 gen_load_fpr64(ctx
, fp1
, ft
);
10159 gen_load_fpr64(ctx
, fp2
, fr
);
10160 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10161 tcg_temp_free_i64(fp0
);
10162 tcg_temp_free_i64(fp1
);
10163 gen_store_fpr64(ctx
, fp2
, fd
);
10164 tcg_temp_free_i64(fp2
);
10170 TCGv_i64 fp0
= tcg_temp_new_i64();
10171 TCGv_i64 fp1
= tcg_temp_new_i64();
10172 TCGv_i64 fp2
= tcg_temp_new_i64();
10174 gen_load_fpr64(ctx
, fp0
, fs
);
10175 gen_load_fpr64(ctx
, fp1
, ft
);
10176 gen_load_fpr64(ctx
, fp2
, fr
);
10177 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10178 tcg_temp_free_i64(fp0
);
10179 tcg_temp_free_i64(fp1
);
10180 gen_store_fpr64(ctx
, fp2
, fd
);
10181 tcg_temp_free_i64(fp2
);
10187 TCGv_i32 fp0
= tcg_temp_new_i32();
10188 TCGv_i32 fp1
= tcg_temp_new_i32();
10189 TCGv_i32 fp2
= tcg_temp_new_i32();
10191 gen_load_fpr32(ctx
, fp0
, fs
);
10192 gen_load_fpr32(ctx
, fp1
, ft
);
10193 gen_load_fpr32(ctx
, fp2
, fr
);
10194 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10195 tcg_temp_free_i32(fp0
);
10196 tcg_temp_free_i32(fp1
);
10197 gen_store_fpr32(ctx
, fp2
, fd
);
10198 tcg_temp_free_i32(fp2
);
10203 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10205 TCGv_i64 fp0
= tcg_temp_new_i64();
10206 TCGv_i64 fp1
= tcg_temp_new_i64();
10207 TCGv_i64 fp2
= tcg_temp_new_i64();
10209 gen_load_fpr64(ctx
, fp0
, fs
);
10210 gen_load_fpr64(ctx
, fp1
, ft
);
10211 gen_load_fpr64(ctx
, fp2
, fr
);
10212 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10213 tcg_temp_free_i64(fp0
);
10214 tcg_temp_free_i64(fp1
);
10215 gen_store_fpr64(ctx
, fp2
, fd
);
10216 tcg_temp_free_i64(fp2
);
10222 TCGv_i64 fp0
= tcg_temp_new_i64();
10223 TCGv_i64 fp1
= tcg_temp_new_i64();
10224 TCGv_i64 fp2
= tcg_temp_new_i64();
10226 gen_load_fpr64(ctx
, fp0
, fs
);
10227 gen_load_fpr64(ctx
, fp1
, ft
);
10228 gen_load_fpr64(ctx
, fp2
, fr
);
10229 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10230 tcg_temp_free_i64(fp0
);
10231 tcg_temp_free_i64(fp1
);
10232 gen_store_fpr64(ctx
, fp2
, fd
);
10233 tcg_temp_free_i64(fp2
);
10239 TCGv_i32 fp0
= tcg_temp_new_i32();
10240 TCGv_i32 fp1
= tcg_temp_new_i32();
10241 TCGv_i32 fp2
= tcg_temp_new_i32();
10243 gen_load_fpr32(ctx
, fp0
, fs
);
10244 gen_load_fpr32(ctx
, fp1
, ft
);
10245 gen_load_fpr32(ctx
, fp2
, fr
);
10246 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10247 tcg_temp_free_i32(fp0
);
10248 tcg_temp_free_i32(fp1
);
10249 gen_store_fpr32(ctx
, fp2
, fd
);
10250 tcg_temp_free_i32(fp2
);
10255 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10257 TCGv_i64 fp0
= tcg_temp_new_i64();
10258 TCGv_i64 fp1
= tcg_temp_new_i64();
10259 TCGv_i64 fp2
= tcg_temp_new_i64();
10261 gen_load_fpr64(ctx
, fp0
, fs
);
10262 gen_load_fpr64(ctx
, fp1
, ft
);
10263 gen_load_fpr64(ctx
, fp2
, fr
);
10264 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10265 tcg_temp_free_i64(fp0
);
10266 tcg_temp_free_i64(fp1
);
10267 gen_store_fpr64(ctx
, fp2
, fd
);
10268 tcg_temp_free_i64(fp2
);
10274 TCGv_i64 fp0
= tcg_temp_new_i64();
10275 TCGv_i64 fp1
= tcg_temp_new_i64();
10276 TCGv_i64 fp2
= tcg_temp_new_i64();
10278 gen_load_fpr64(ctx
, fp0
, fs
);
10279 gen_load_fpr64(ctx
, fp1
, ft
);
10280 gen_load_fpr64(ctx
, fp2
, fr
);
10281 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10282 tcg_temp_free_i64(fp0
);
10283 tcg_temp_free_i64(fp1
);
10284 gen_store_fpr64(ctx
, fp2
, fd
);
10285 tcg_temp_free_i64(fp2
);
10291 TCGv_i32 fp0
= tcg_temp_new_i32();
10292 TCGv_i32 fp1
= tcg_temp_new_i32();
10293 TCGv_i32 fp2
= tcg_temp_new_i32();
10295 gen_load_fpr32(ctx
, fp0
, fs
);
10296 gen_load_fpr32(ctx
, fp1
, ft
);
10297 gen_load_fpr32(ctx
, fp2
, fr
);
10298 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10299 tcg_temp_free_i32(fp0
);
10300 tcg_temp_free_i32(fp1
);
10301 gen_store_fpr32(ctx
, fp2
, fd
);
10302 tcg_temp_free_i32(fp2
);
10307 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10309 TCGv_i64 fp0
= tcg_temp_new_i64();
10310 TCGv_i64 fp1
= tcg_temp_new_i64();
10311 TCGv_i64 fp2
= tcg_temp_new_i64();
10313 gen_load_fpr64(ctx
, fp0
, fs
);
10314 gen_load_fpr64(ctx
, fp1
, ft
);
10315 gen_load_fpr64(ctx
, fp2
, fr
);
10316 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10317 tcg_temp_free_i64(fp0
);
10318 tcg_temp_free_i64(fp1
);
10319 gen_store_fpr64(ctx
, fp2
, fd
);
10320 tcg_temp_free_i64(fp2
);
10326 TCGv_i64 fp0
= tcg_temp_new_i64();
10327 TCGv_i64 fp1
= tcg_temp_new_i64();
10328 TCGv_i64 fp2
= tcg_temp_new_i64();
10330 gen_load_fpr64(ctx
, fp0
, fs
);
10331 gen_load_fpr64(ctx
, fp1
, ft
);
10332 gen_load_fpr64(ctx
, fp2
, fr
);
10333 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10334 tcg_temp_free_i64(fp0
);
10335 tcg_temp_free_i64(fp1
);
10336 gen_store_fpr64(ctx
, fp2
, fd
);
10337 tcg_temp_free_i64(fp2
);
10341 MIPS_INVAL("flt3_arith");
10342 generate_exception_end(ctx
, EXCP_RI
);
10347 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10351 #if !defined(CONFIG_USER_ONLY)
10352 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10353 Therefore only check the ISA in system mode. */
10354 check_insn(ctx
, ISA_MIPS32R2
);
10356 t0
= tcg_temp_new();
10360 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10361 gen_store_gpr(t0
, rt
);
10364 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10365 gen_store_gpr(t0
, rt
);
10368 gen_helper_rdhwr_cc(t0
, cpu_env
);
10369 gen_store_gpr(t0
, rt
);
10372 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10373 gen_store_gpr(t0
, rt
);
10376 check_insn(ctx
, ISA_MIPS32R6
);
10378 /* Performance counter registers are not implemented other than
10379 * control register 0.
10381 generate_exception(ctx
, EXCP_RI
);
10383 gen_helper_rdhwr_performance(t0
, cpu_env
);
10384 gen_store_gpr(t0
, rt
);
10387 check_insn(ctx
, ISA_MIPS32R6
);
10388 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10389 gen_store_gpr(t0
, rt
);
10392 #if defined(CONFIG_USER_ONLY)
10393 tcg_gen_ld_tl(t0
, cpu_env
,
10394 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10395 gen_store_gpr(t0
, rt
);
10398 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10399 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10400 tcg_gen_ld_tl(t0
, cpu_env
,
10401 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10402 gen_store_gpr(t0
, rt
);
10404 generate_exception_end(ctx
, EXCP_RI
);
10408 default: /* Invalid */
10409 MIPS_INVAL("rdhwr");
10410 generate_exception_end(ctx
, EXCP_RI
);
10416 static inline void clear_branch_hflags(DisasContext
*ctx
)
10418 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10419 if (ctx
->bstate
== BS_NONE
) {
10420 save_cpu_state(ctx
, 0);
10422 /* it is not safe to save ctx->hflags as hflags may be changed
10423 in execution time by the instruction in delay / forbidden slot. */
10424 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10428 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10430 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10431 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10432 /* Branches completion */
10433 clear_branch_hflags(ctx
);
10434 ctx
->bstate
= BS_BRANCH
;
10435 /* FIXME: Need to clear can_do_io. */
10436 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10437 case MIPS_HFLAG_FBNSLOT
:
10438 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10441 /* unconditional branch */
10442 if (proc_hflags
& MIPS_HFLAG_BX
) {
10443 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10445 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10447 case MIPS_HFLAG_BL
:
10448 /* blikely taken case */
10449 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10451 case MIPS_HFLAG_BC
:
10452 /* Conditional branch */
10454 TCGLabel
*l1
= gen_new_label();
10456 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10457 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10459 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10462 case MIPS_HFLAG_BR
:
10463 /* unconditional branch to register */
10464 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10465 TCGv t0
= tcg_temp_new();
10466 TCGv_i32 t1
= tcg_temp_new_i32();
10468 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10469 tcg_gen_trunc_tl_i32(t1
, t0
);
10471 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10472 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10473 tcg_gen_or_i32(hflags
, hflags
, t1
);
10474 tcg_temp_free_i32(t1
);
10476 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10478 tcg_gen_mov_tl(cpu_PC
, btarget
);
10480 if (ctx
->singlestep_enabled
) {
10481 save_cpu_state(ctx
, 0);
10482 gen_helper_raise_exception_debug(cpu_env
);
10484 tcg_gen_exit_tb(0);
10487 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10493 /* Compact Branches */
10494 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10495 int rs
, int rt
, int32_t offset
)
10497 int bcond_compute
= 0;
10498 TCGv t0
= tcg_temp_new();
10499 TCGv t1
= tcg_temp_new();
10500 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10502 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10503 #ifdef MIPS_DEBUG_DISAS
10504 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10507 generate_exception_end(ctx
, EXCP_RI
);
10511 /* Load needed operands and calculate btarget */
10513 /* compact branch */
10514 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10515 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10516 gen_load_gpr(t0
, rs
);
10517 gen_load_gpr(t1
, rt
);
10519 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10520 if (rs
<= rt
&& rs
== 0) {
10521 /* OPC_BEQZALC, OPC_BNEZALC */
10522 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10525 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10526 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10527 gen_load_gpr(t0
, rs
);
10528 gen_load_gpr(t1
, rt
);
10530 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10532 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10533 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10534 if (rs
== 0 || rs
== rt
) {
10535 /* OPC_BLEZALC, OPC_BGEZALC */
10536 /* OPC_BGTZALC, OPC_BLTZALC */
10537 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10539 gen_load_gpr(t0
, rs
);
10540 gen_load_gpr(t1
, rt
);
10542 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10546 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10551 /* OPC_BEQZC, OPC_BNEZC */
10552 gen_load_gpr(t0
, rs
);
10554 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10556 /* OPC_JIC, OPC_JIALC */
10557 TCGv tbase
= tcg_temp_new();
10558 TCGv toffset
= tcg_temp_new();
10560 gen_load_gpr(tbase
, rt
);
10561 tcg_gen_movi_tl(toffset
, offset
);
10562 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10563 tcg_temp_free(tbase
);
10564 tcg_temp_free(toffset
);
10568 MIPS_INVAL("Compact branch/jump");
10569 generate_exception_end(ctx
, EXCP_RI
);
10573 if (bcond_compute
== 0) {
10574 /* Uncoditional compact branch */
10577 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10580 ctx
->hflags
|= MIPS_HFLAG_BR
;
10583 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10586 ctx
->hflags
|= MIPS_HFLAG_B
;
10589 MIPS_INVAL("Compact branch/jump");
10590 generate_exception_end(ctx
, EXCP_RI
);
10594 /* Generating branch here as compact branches don't have delay slot */
10595 gen_branch(ctx
, 4);
10597 /* Conditional compact branch */
10598 TCGLabel
*fs
= gen_new_label();
10599 save_cpu_state(ctx
, 0);
10602 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10603 if (rs
== 0 && rt
!= 0) {
10605 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10606 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10608 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10611 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10614 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10615 if (rs
== 0 && rt
!= 0) {
10617 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10618 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10620 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10623 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10626 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10627 if (rs
== 0 && rt
!= 0) {
10629 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10630 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10632 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10635 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10638 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10639 if (rs
== 0 && rt
!= 0) {
10641 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10642 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10644 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10647 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10650 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10651 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10653 /* OPC_BOVC, OPC_BNVC */
10654 TCGv t2
= tcg_temp_new();
10655 TCGv t3
= tcg_temp_new();
10656 TCGv t4
= tcg_temp_new();
10657 TCGv input_overflow
= tcg_temp_new();
10659 gen_load_gpr(t0
, rs
);
10660 gen_load_gpr(t1
, rt
);
10661 tcg_gen_ext32s_tl(t2
, t0
);
10662 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10663 tcg_gen_ext32s_tl(t3
, t1
);
10664 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10665 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10667 tcg_gen_add_tl(t4
, t2
, t3
);
10668 tcg_gen_ext32s_tl(t4
, t4
);
10669 tcg_gen_xor_tl(t2
, t2
, t3
);
10670 tcg_gen_xor_tl(t3
, t4
, t3
);
10671 tcg_gen_andc_tl(t2
, t3
, t2
);
10672 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10673 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10674 if (opc
== OPC_BOVC
) {
10676 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10679 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10681 tcg_temp_free(input_overflow
);
10685 } else if (rs
< rt
&& rs
== 0) {
10686 /* OPC_BEQZALC, OPC_BNEZALC */
10687 if (opc
== OPC_BEQZALC
) {
10689 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10692 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10695 /* OPC_BEQC, OPC_BNEC */
10696 if (opc
== OPC_BEQC
) {
10698 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10701 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10706 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10709 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10712 MIPS_INVAL("Compact conditional branch/jump");
10713 generate_exception_end(ctx
, EXCP_RI
);
10717 /* Generating branch here as compact branches don't have delay slot */
10718 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10721 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10729 /* ISA extensions (ASEs) */
10730 /* MIPS16 extension to MIPS32 */
10732 /* MIPS16 major opcodes */
10734 M16_OPC_ADDIUSP
= 0x00,
10735 M16_OPC_ADDIUPC
= 0x01,
10737 M16_OPC_JAL
= 0x03,
10738 M16_OPC_BEQZ
= 0x04,
10739 M16_OPC_BNEQZ
= 0x05,
10740 M16_OPC_SHIFT
= 0x06,
10742 M16_OPC_RRIA
= 0x08,
10743 M16_OPC_ADDIU8
= 0x09,
10744 M16_OPC_SLTI
= 0x0a,
10745 M16_OPC_SLTIU
= 0x0b,
10748 M16_OPC_CMPI
= 0x0e,
10752 M16_OPC_LWSP
= 0x12,
10754 M16_OPC_LBU
= 0x14,
10755 M16_OPC_LHU
= 0x15,
10756 M16_OPC_LWPC
= 0x16,
10757 M16_OPC_LWU
= 0x17,
10760 M16_OPC_SWSP
= 0x1a,
10762 M16_OPC_RRR
= 0x1c,
10764 M16_OPC_EXTEND
= 0x1e,
10768 /* I8 funct field */
10787 /* RR funct field */
10821 /* I64 funct field */
10829 I64_DADDIUPC
= 0x6,
10833 /* RR ry field for CNVT */
10835 RR_RY_CNVT_ZEB
= 0x0,
10836 RR_RY_CNVT_ZEH
= 0x1,
10837 RR_RY_CNVT_ZEW
= 0x2,
10838 RR_RY_CNVT_SEB
= 0x4,
10839 RR_RY_CNVT_SEH
= 0x5,
10840 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field to the full MIPS32 register
 * number: encodings 0 and 1 name $16/$17 (s0/s1), 2..7 name $2..$7.
 */
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
10850 static void gen_mips16_save (DisasContext
*ctx
,
10851 int xsregs
, int aregs
,
10852 int do_ra
, int do_s0
, int do_s1
,
10855 TCGv t0
= tcg_temp_new();
10856 TCGv t1
= tcg_temp_new();
10857 TCGv t2
= tcg_temp_new();
10887 generate_exception_end(ctx
, EXCP_RI
);
10893 gen_base_offset_addr(ctx
, t0
, 29, 12);
10894 gen_load_gpr(t1
, 7);
10895 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10898 gen_base_offset_addr(ctx
, t0
, 29, 8);
10899 gen_load_gpr(t1
, 6);
10900 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10903 gen_base_offset_addr(ctx
, t0
, 29, 4);
10904 gen_load_gpr(t1
, 5);
10905 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10908 gen_base_offset_addr(ctx
, t0
, 29, 0);
10909 gen_load_gpr(t1
, 4);
10910 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10913 gen_load_gpr(t0
, 29);
10915 #define DECR_AND_STORE(reg) do { \
10916 tcg_gen_movi_tl(t2, -4); \
10917 gen_op_addr_add(ctx, t0, t0, t2); \
10918 gen_load_gpr(t1, reg); \
10919 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10923 DECR_AND_STORE(31);
10928 DECR_AND_STORE(30);
10931 DECR_AND_STORE(23);
10934 DECR_AND_STORE(22);
10937 DECR_AND_STORE(21);
10940 DECR_AND_STORE(20);
10943 DECR_AND_STORE(19);
10946 DECR_AND_STORE(18);
10950 DECR_AND_STORE(17);
10953 DECR_AND_STORE(16);
10983 generate_exception_end(ctx
, EXCP_RI
);
10999 #undef DECR_AND_STORE
11001 tcg_gen_movi_tl(t2
, -framesize
);
11002 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11008 static void gen_mips16_restore (DisasContext
*ctx
,
11009 int xsregs
, int aregs
,
11010 int do_ra
, int do_s0
, int do_s1
,
11014 TCGv t0
= tcg_temp_new();
11015 TCGv t1
= tcg_temp_new();
11016 TCGv t2
= tcg_temp_new();
11018 tcg_gen_movi_tl(t2
, framesize
);
11019 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11021 #define DECR_AND_LOAD(reg) do { \
11022 tcg_gen_movi_tl(t2, -4); \
11023 gen_op_addr_add(ctx, t0, t0, t2); \
11024 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11025 gen_store_gpr(t1, reg); \
11089 generate_exception_end(ctx
, EXCP_RI
);
11105 #undef DECR_AND_LOAD
11107 tcg_gen_movi_tl(t2
, framesize
);
11108 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11114 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11115 int is_64_bit
, int extended
)
11119 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11120 generate_exception_end(ctx
, EXCP_RI
);
11124 t0
= tcg_temp_new();
11126 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11127 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11129 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11135 #if defined(TARGET_MIPS64)
11136 static void decode_i64_mips16 (DisasContext
*ctx
,
11137 int ry
, int funct
, int16_t offset
,
11142 check_insn(ctx
, ISA_MIPS3
);
11143 check_mips_64(ctx
);
11144 offset
= extended
? offset
: offset
<< 3;
11145 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11148 check_insn(ctx
, ISA_MIPS3
);
11149 check_mips_64(ctx
);
11150 offset
= extended
? offset
: offset
<< 3;
11151 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11154 check_insn(ctx
, ISA_MIPS3
);
11155 check_mips_64(ctx
);
11156 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11157 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11160 check_insn(ctx
, ISA_MIPS3
);
11161 check_mips_64(ctx
);
11162 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11163 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11166 check_insn(ctx
, ISA_MIPS3
);
11167 check_mips_64(ctx
);
11168 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11169 generate_exception_end(ctx
, EXCP_RI
);
11171 offset
= extended
? offset
: offset
<< 3;
11172 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11176 check_insn(ctx
, ISA_MIPS3
);
11177 check_mips_64(ctx
);
11178 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11179 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11182 check_insn(ctx
, ISA_MIPS3
);
11183 check_mips_64(ctx
);
11184 offset
= extended
? offset
: offset
<< 2;
11185 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11188 check_insn(ctx
, ISA_MIPS3
);
11189 check_mips_64(ctx
);
11190 offset
= extended
? offset
: offset
<< 2;
11191 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11197 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11199 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11200 int op
, rx
, ry
, funct
, sa
;
11201 int16_t imm
, offset
;
11203 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11204 op
= (ctx
->opcode
>> 11) & 0x1f;
11205 sa
= (ctx
->opcode
>> 22) & 0x1f;
11206 funct
= (ctx
->opcode
>> 8) & 0x7;
11207 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11208 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11209 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11210 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11211 | (ctx
->opcode
& 0x1f));
11213 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11216 case M16_OPC_ADDIUSP
:
11217 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11219 case M16_OPC_ADDIUPC
:
11220 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11223 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11224 /* No delay slot, so just process as a normal instruction */
11227 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11228 /* No delay slot, so just process as a normal instruction */
11230 case M16_OPC_BNEQZ
:
11231 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11232 /* No delay slot, so just process as a normal instruction */
11234 case M16_OPC_SHIFT
:
11235 switch (ctx
->opcode
& 0x3) {
11237 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11240 #if defined(TARGET_MIPS64)
11241 check_mips_64(ctx
);
11242 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11244 generate_exception_end(ctx
, EXCP_RI
);
11248 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11251 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11255 #if defined(TARGET_MIPS64)
11257 check_insn(ctx
, ISA_MIPS3
);
11258 check_mips_64(ctx
);
11259 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11263 imm
= ctx
->opcode
& 0xf;
11264 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11265 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11266 imm
= (int16_t) (imm
<< 1) >> 1;
11267 if ((ctx
->opcode
>> 4) & 0x1) {
11268 #if defined(TARGET_MIPS64)
11269 check_mips_64(ctx
);
11270 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11272 generate_exception_end(ctx
, EXCP_RI
);
11275 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11278 case M16_OPC_ADDIU8
:
11279 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11282 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11284 case M16_OPC_SLTIU
:
11285 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11290 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11293 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11296 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11299 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11302 check_insn(ctx
, ISA_MIPS32
);
11304 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11305 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11306 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11307 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11308 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11309 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11310 | (ctx
->opcode
& 0xf)) << 3;
11312 if (ctx
->opcode
& (1 << 7)) {
11313 gen_mips16_save(ctx
, xsregs
, aregs
,
11314 do_ra
, do_s0
, do_s1
,
11317 gen_mips16_restore(ctx
, xsregs
, aregs
,
11318 do_ra
, do_s0
, do_s1
,
11324 generate_exception_end(ctx
, EXCP_RI
);
11329 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11332 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11334 #if defined(TARGET_MIPS64)
11336 check_insn(ctx
, ISA_MIPS3
);
11337 check_mips_64(ctx
);
11338 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11342 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11345 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11348 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11351 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11354 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11357 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11360 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11362 #if defined(TARGET_MIPS64)
11364 check_insn(ctx
, ISA_MIPS3
);
11365 check_mips_64(ctx
);
11366 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11370 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11373 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11376 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11379 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11381 #if defined(TARGET_MIPS64)
11383 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11387 generate_exception_end(ctx
, EXCP_RI
);
11394 static inline bool is_uhi(int sdbbp_code
)
11396 #ifdef CONFIG_USER_ONLY
11399 return semihosting_enabled() && sdbbp_code
== 1;
11403 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11407 int op
, cnvt_op
, op1
, offset
;
11411 op
= (ctx
->opcode
>> 11) & 0x1f;
11412 sa
= (ctx
->opcode
>> 2) & 0x7;
11413 sa
= sa
== 0 ? 8 : sa
;
11414 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11415 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11416 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11417 op1
= offset
= ctx
->opcode
& 0x1f;
11422 case M16_OPC_ADDIUSP
:
11424 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11426 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11429 case M16_OPC_ADDIUPC
:
11430 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11433 offset
= (ctx
->opcode
& 0x7ff) << 1;
11434 offset
= (int16_t)(offset
<< 4) >> 4;
11435 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11436 /* No delay slot, so just process as a normal instruction */
11439 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11440 offset
= (((ctx
->opcode
& 0x1f) << 21)
11441 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11443 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11444 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11448 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11449 ((int8_t)ctx
->opcode
) << 1, 0);
11450 /* No delay slot, so just process as a normal instruction */
11452 case M16_OPC_BNEQZ
:
11453 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11454 ((int8_t)ctx
->opcode
) << 1, 0);
11455 /* No delay slot, so just process as a normal instruction */
11457 case M16_OPC_SHIFT
:
11458 switch (ctx
->opcode
& 0x3) {
11460 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11463 #if defined(TARGET_MIPS64)
11464 check_insn(ctx
, ISA_MIPS3
);
11465 check_mips_64(ctx
);
11466 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11468 generate_exception_end(ctx
, EXCP_RI
);
11472 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11475 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11479 #if defined(TARGET_MIPS64)
11481 check_insn(ctx
, ISA_MIPS3
);
11482 check_mips_64(ctx
);
11483 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11488 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11490 if ((ctx
->opcode
>> 4) & 1) {
11491 #if defined(TARGET_MIPS64)
11492 check_insn(ctx
, ISA_MIPS3
);
11493 check_mips_64(ctx
);
11494 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11496 generate_exception_end(ctx
, EXCP_RI
);
11499 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11503 case M16_OPC_ADDIU8
:
11505 int16_t imm
= (int8_t) ctx
->opcode
;
11507 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11512 int16_t imm
= (uint8_t) ctx
->opcode
;
11513 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11516 case M16_OPC_SLTIU
:
11518 int16_t imm
= (uint8_t) ctx
->opcode
;
11519 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11526 funct
= (ctx
->opcode
>> 8) & 0x7;
11529 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11530 ((int8_t)ctx
->opcode
) << 1, 0);
11533 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11534 ((int8_t)ctx
->opcode
) << 1, 0);
11537 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11540 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11541 ((int8_t)ctx
->opcode
) << 3);
11544 check_insn(ctx
, ISA_MIPS32
);
11546 int do_ra
= ctx
->opcode
& (1 << 6);
11547 int do_s0
= ctx
->opcode
& (1 << 5);
11548 int do_s1
= ctx
->opcode
& (1 << 4);
11549 int framesize
= ctx
->opcode
& 0xf;
11551 if (framesize
== 0) {
11554 framesize
= framesize
<< 3;
11557 if (ctx
->opcode
& (1 << 7)) {
11558 gen_mips16_save(ctx
, 0, 0,
11559 do_ra
, do_s0
, do_s1
, framesize
);
11561 gen_mips16_restore(ctx
, 0, 0,
11562 do_ra
, do_s0
, do_s1
, framesize
);
11568 int rz
= xlat(ctx
->opcode
& 0x7);
11570 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11571 ((ctx
->opcode
>> 5) & 0x7);
11572 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11576 reg32
= ctx
->opcode
& 0x1f;
11577 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11580 generate_exception_end(ctx
, EXCP_RI
);
11587 int16_t imm
= (uint8_t) ctx
->opcode
;
11589 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11594 int16_t imm
= (uint8_t) ctx
->opcode
;
11595 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11598 #if defined(TARGET_MIPS64)
11600 check_insn(ctx
, ISA_MIPS3
);
11601 check_mips_64(ctx
);
11602 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11606 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11609 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11612 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11615 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11618 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11621 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11624 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11626 #if defined (TARGET_MIPS64)
11628 check_insn(ctx
, ISA_MIPS3
);
11629 check_mips_64(ctx
);
11630 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11634 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11637 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11640 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11643 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11647 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11650 switch (ctx
->opcode
& 0x3) {
11652 mips32_op
= OPC_ADDU
;
11655 mips32_op
= OPC_SUBU
;
11657 #if defined(TARGET_MIPS64)
11659 mips32_op
= OPC_DADDU
;
11660 check_insn(ctx
, ISA_MIPS3
);
11661 check_mips_64(ctx
);
11664 mips32_op
= OPC_DSUBU
;
11665 check_insn(ctx
, ISA_MIPS3
);
11666 check_mips_64(ctx
);
11670 generate_exception_end(ctx
, EXCP_RI
);
11674 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11683 int nd
= (ctx
->opcode
>> 7) & 0x1;
11684 int link
= (ctx
->opcode
>> 6) & 0x1;
11685 int ra
= (ctx
->opcode
>> 5) & 0x1;
11688 check_insn(ctx
, ISA_MIPS32
);
11697 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11702 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11703 gen_helper_do_semihosting(cpu_env
);
11705 /* XXX: not clear which exception should be raised
11706 * when in debug mode...
11708 check_insn(ctx
, ISA_MIPS32
);
11709 generate_exception_end(ctx
, EXCP_DBp
);
11713 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11716 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11719 generate_exception_end(ctx
, EXCP_BREAK
);
11722 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11725 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11728 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11730 #if defined (TARGET_MIPS64)
11732 check_insn(ctx
, ISA_MIPS3
);
11733 check_mips_64(ctx
);
11734 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11738 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11741 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11744 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11747 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11750 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11753 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11756 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11759 check_insn(ctx
, ISA_MIPS32
);
11761 case RR_RY_CNVT_ZEB
:
11762 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11764 case RR_RY_CNVT_ZEH
:
11765 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11767 case RR_RY_CNVT_SEB
:
11768 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11770 case RR_RY_CNVT_SEH
:
11771 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11773 #if defined (TARGET_MIPS64)
11774 case RR_RY_CNVT_ZEW
:
11775 check_insn(ctx
, ISA_MIPS64
);
11776 check_mips_64(ctx
);
11777 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11779 case RR_RY_CNVT_SEW
:
11780 check_insn(ctx
, ISA_MIPS64
);
11781 check_mips_64(ctx
);
11782 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11786 generate_exception_end(ctx
, EXCP_RI
);
11791 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11793 #if defined (TARGET_MIPS64)
11795 check_insn(ctx
, ISA_MIPS3
);
11796 check_mips_64(ctx
);
11797 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11800 check_insn(ctx
, ISA_MIPS3
);
11801 check_mips_64(ctx
);
11802 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11805 check_insn(ctx
, ISA_MIPS3
);
11806 check_mips_64(ctx
);
11807 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11810 check_insn(ctx
, ISA_MIPS3
);
11811 check_mips_64(ctx
);
11812 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11816 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11819 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11822 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11825 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11827 #if defined (TARGET_MIPS64)
11829 check_insn(ctx
, ISA_MIPS3
);
11830 check_mips_64(ctx
);
11831 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11834 check_insn(ctx
, ISA_MIPS3
);
11835 check_mips_64(ctx
);
11836 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11839 check_insn(ctx
, ISA_MIPS3
);
11840 check_mips_64(ctx
);
11841 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11844 check_insn(ctx
, ISA_MIPS3
);
11845 check_mips_64(ctx
);
11846 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11850 generate_exception_end(ctx
, EXCP_RI
);
11854 case M16_OPC_EXTEND
:
11855 decode_extended_mips16_opc(env
, ctx
);
11858 #if defined(TARGET_MIPS64)
11860 funct
= (ctx
->opcode
>> 8) & 0x7;
11861 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11865 generate_exception_end(ctx
, EXCP_RI
);
11872 /* microMIPS extension to MIPS32/MIPS64 */
11875 * microMIPS32/microMIPS64 major opcodes
11877 * 1. MIPS Architecture for Programmers Volume II-B:
11878 * The microMIPS32 Instruction Set (Revision 3.05)
11880 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11882 * 2. MIPS Architecture For Programmers Volume II-A:
11883 * The MIPS64 Instruction Set (Revision 3.51)
11913 POOL32S
= 0x16, /* MIPS64 */
11914 DADDIU32
= 0x17, /* MIPS64 */
11943 /* 0x29 is reserved */
11956 /* 0x31 is reserved */
11969 SD32
= 0x36, /* MIPS64 */
11970 LD32
= 0x37, /* MIPS64 */
11972 /* 0x39 is reserved */
11988 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
11998 /* POOL32A encoding of minor opcode field */
12001 /* These opcodes are distinguished only by bits 9..6; those bits are
12002 * what are recorded below. */
12039 /* The following can be distinguished by their lower 6 bits. */
12049 /* POOL32AXF encoding of minor opcode field extension */
12052 * 1. MIPS Architecture for Programmers Volume II-B:
12053 * The microMIPS32 Instruction Set (Revision 3.05)
12055 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12057 * 2. MIPS Architecture for Programmers VolumeIV-e:
12058 * The MIPS DSP Application-Specific Extension
12059 * to the microMIPS32 Architecture (Revision 2.34)
12061 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12076 /* begin of microMIPS32 DSP */
12078 /* bits 13..12 for 0x01 */
12084 /* bits 13..12 for 0x2a */
12090 /* bits 13..12 for 0x32 */
12094 /* end of microMIPS32 DSP */
12096 /* bits 15..12 for 0x2c */
12113 /* bits 15..12 for 0x34 */
12121 /* bits 15..12 for 0x3c */
12123 JR
= 0x0, /* alias */
12131 /* bits 15..12 for 0x05 */
12135 /* bits 15..12 for 0x0d */
12147 /* bits 15..12 for 0x15 */
12153 /* bits 15..12 for 0x1d */
12157 /* bits 15..12 for 0x2d */
12162 /* bits 15..12 for 0x35 */
12169 /* POOL32B encoding of minor opcode field (bits 15..12) */
12185 /* POOL32C encoding of minor opcode field (bits 15..12) */
12193 /* 0xa is reserved */
12200 /* 0x6 is reserved */
12206 /* POOL32F encoding of minor opcode field (bits 5..0) */
12209 /* These are the bit 7..6 values */
12218 /* These are the bit 8..6 values */
12243 MOVZ_FMT_05
= 0x05,
12277 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12284 /* POOL32Fxf encoding of minor opcode extension field */
12322 /* POOL32I encoding of minor opcode field (bits 25..21) */
12352 /* These overlap and are distinguished by bit16 of the instruction */
12361 /* POOL16A encoding of minor opcode field */
12368 /* POOL16B encoding of minor opcode field */
12375 /* POOL16C encoding of minor opcode field */
12395 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12415 /* POOL16D encoding of minor opcode field */
12422 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit MIPS16/microMIPS register encoding onto the architectural
 * GPR number (encodings 0..7 select $16, $17, $2..$7). */
static int mmreg(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12436 /* Used for 16-bit store instructions. */
/* Map a 3-bit register encoding for 16-bit store instructions; encoding 0
 * selects $0 (zero register) instead of $16, otherwise same as mmreg(). */
static int mmreg2(int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Register-field extractors for microMIPS 16-bit instruction words. */
#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate: extract `width` bits starting at `start`, then
 * sign-extend by shifting up to bit 31 and arithmetically back down. */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
               << (32-width)) \
               >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12459 static void gen_addiur1sp(DisasContext
*ctx
)
12461 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12463 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12466 static void gen_addiur2(DisasContext
*ctx
)
12468 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12469 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12470 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12472 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12475 static void gen_addiusp(DisasContext
*ctx
)
12477 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12480 if (encoded
<= 1) {
12481 decoded
= 256 + encoded
;
12482 } else if (encoded
<= 255) {
12484 } else if (encoded
<= 509) {
12485 decoded
= encoded
- 512;
12487 decoded
= encoded
- 768;
12490 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12493 static void gen_addius5(DisasContext
*ctx
)
12495 int imm
= SIMM(ctx
->opcode
, 1, 4);
12496 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12498 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12501 static void gen_andi16(DisasContext
*ctx
)
12503 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12504 31, 32, 63, 64, 255, 32768, 65535 };
12505 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12506 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12507 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12509 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12512 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12513 int base
, int16_t offset
)
12518 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12519 generate_exception_end(ctx
, EXCP_RI
);
12523 t0
= tcg_temp_new();
12525 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12527 t1
= tcg_const_tl(reglist
);
12528 t2
= tcg_const_i32(ctx
->mem_idx
);
12530 save_cpu_state(ctx
, 1);
12533 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12536 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12538 #ifdef TARGET_MIPS64
12540 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12543 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12549 tcg_temp_free_i32(t2
);
12553 static void gen_pool16c_insn(DisasContext
*ctx
)
12555 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12556 int rs
= mmreg(ctx
->opcode
& 0x7);
12558 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12563 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12569 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12575 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12581 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12588 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12589 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12591 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12600 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12601 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12603 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12610 int reg
= ctx
->opcode
& 0x1f;
12612 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12618 int reg
= ctx
->opcode
& 0x1f;
12619 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12620 /* Let normal delay slot handling in our caller take us
12621 to the branch target. */
12626 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12627 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12631 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12632 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12636 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12640 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12643 generate_exception_end(ctx
, EXCP_BREAK
);
12646 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12647 gen_helper_do_semihosting(cpu_env
);
12649 /* XXX: not clear which exception should be raised
12650 * when in debug mode...
12652 check_insn(ctx
, ISA_MIPS32
);
12653 generate_exception_end(ctx
, EXCP_DBp
);
12656 case JRADDIUSP
+ 0:
12657 case JRADDIUSP
+ 1:
12659 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12660 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12661 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12662 /* Let normal delay slot handling in our caller take us
12663 to the branch target. */
12667 generate_exception_end(ctx
, EXCP_RI
);
12672 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12675 int rd
, rs
, re
, rt
;
12676 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12677 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12678 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12679 rd
= rd_enc
[enc_dest
];
12680 re
= re_enc
[enc_dest
];
12681 rs
= rs_rt_enc
[enc_rs
];
12682 rt
= rs_rt_enc
[enc_rt
];
12684 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12686 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12689 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12691 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12695 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12697 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12698 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12700 switch (ctx
->opcode
& 0xf) {
12702 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12705 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12709 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12710 int offset
= extract32(ctx
->opcode
, 4, 4);
12711 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12714 case R6_JRC16
: /* JRCADDIUSP */
12715 if ((ctx
->opcode
>> 4) & 1) {
12717 int imm
= extract32(ctx
->opcode
, 5, 5);
12718 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12719 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12722 int rs
= extract32(ctx
->opcode
, 5, 5);
12723 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12726 case MOVEP
... MOVEP_07
:
12727 case MOVEP_0C
... MOVEP_0F
:
12729 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12730 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12731 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12732 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12736 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12739 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12743 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12744 int offset
= extract32(ctx
->opcode
, 4, 4);
12745 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12748 case JALRC16
: /* BREAK16, SDBBP16 */
12749 switch (ctx
->opcode
& 0x3f) {
12751 case JALRC16
+ 0x20:
12753 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12758 generate_exception(ctx
, EXCP_BREAK
);
12762 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12763 gen_helper_do_semihosting(cpu_env
);
12765 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12766 generate_exception(ctx
, EXCP_RI
);
12768 generate_exception(ctx
, EXCP_DBp
);
12775 generate_exception(ctx
, EXCP_RI
);
12780 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
12782 TCGv t0
= tcg_temp_new();
12783 TCGv t1
= tcg_temp_new();
12785 gen_load_gpr(t0
, base
);
12788 gen_load_gpr(t1
, index
);
12789 tcg_gen_shli_tl(t1
, t1
, 2);
12790 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12793 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12794 gen_store_gpr(t1
, rd
);
12800 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12801 int base
, int16_t offset
)
12805 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12806 generate_exception_end(ctx
, EXCP_RI
);
12810 t0
= tcg_temp_new();
12811 t1
= tcg_temp_new();
12813 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12818 generate_exception_end(ctx
, EXCP_RI
);
12821 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12822 gen_store_gpr(t1
, rd
);
12823 tcg_gen_movi_tl(t1
, 4);
12824 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12825 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12826 gen_store_gpr(t1
, rd
+1);
12829 gen_load_gpr(t1
, rd
);
12830 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12831 tcg_gen_movi_tl(t1
, 4);
12832 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12833 gen_load_gpr(t1
, rd
+1);
12834 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12836 #ifdef TARGET_MIPS64
12839 generate_exception_end(ctx
, EXCP_RI
);
12842 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12843 gen_store_gpr(t1
, rd
);
12844 tcg_gen_movi_tl(t1
, 8);
12845 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12846 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12847 gen_store_gpr(t1
, rd
+1);
12850 gen_load_gpr(t1
, rd
);
12851 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12852 tcg_gen_movi_tl(t1
, 8);
12853 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12854 gen_load_gpr(t1
, rd
+1);
12855 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12863 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12865 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12866 int minor
= (ctx
->opcode
>> 12) & 0xf;
12867 uint32_t mips32_op
;
12869 switch (extension
) {
12871 mips32_op
= OPC_TEQ
;
12874 mips32_op
= OPC_TGE
;
12877 mips32_op
= OPC_TGEU
;
12880 mips32_op
= OPC_TLT
;
12883 mips32_op
= OPC_TLTU
;
12886 mips32_op
= OPC_TNE
;
12888 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12890 #ifndef CONFIG_USER_ONLY
12893 check_cp0_enabled(ctx
);
12895 /* Treat as NOP. */
12898 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12902 check_cp0_enabled(ctx
);
12904 TCGv t0
= tcg_temp_new();
12906 gen_load_gpr(t0
, rt
);
12907 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12913 switch (minor
& 3) {
12915 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12918 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12921 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12924 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12927 goto pool32axf_invalid
;
12931 switch (minor
& 3) {
12933 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12936 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12939 goto pool32axf_invalid
;
12945 check_insn(ctx
, ISA_MIPS32R6
);
12946 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12949 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12952 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12955 mips32_op
= OPC_CLO
;
12958 mips32_op
= OPC_CLZ
;
12960 check_insn(ctx
, ISA_MIPS32
);
12961 gen_cl(ctx
, mips32_op
, rt
, rs
);
12964 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12965 gen_rdhwr(ctx
, rt
, rs
, 0);
12968 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
12971 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12972 mips32_op
= OPC_MULT
;
12975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12976 mips32_op
= OPC_MULTU
;
12979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12980 mips32_op
= OPC_DIV
;
12983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12984 mips32_op
= OPC_DIVU
;
12987 check_insn(ctx
, ISA_MIPS32
);
12988 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12991 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12992 mips32_op
= OPC_MADD
;
12995 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12996 mips32_op
= OPC_MADDU
;
12999 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13000 mips32_op
= OPC_MSUB
;
13003 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13004 mips32_op
= OPC_MSUBU
;
13006 check_insn(ctx
, ISA_MIPS32
);
13007 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13010 goto pool32axf_invalid
;
13021 generate_exception_err(ctx
, EXCP_CpU
, 2);
13024 goto pool32axf_invalid
;
13029 case JALR
: /* JALRC */
13030 case JALR_HB
: /* JALRC_HB */
13031 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13032 /* JALRC, JALRC_HB */
13033 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13035 /* JALR, JALR_HB */
13036 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13037 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13042 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13043 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13044 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13047 goto pool32axf_invalid
;
13053 check_cp0_enabled(ctx
);
13054 check_insn(ctx
, ISA_MIPS32R2
);
13055 gen_load_srsgpr(rs
, rt
);
13058 check_cp0_enabled(ctx
);
13059 check_insn(ctx
, ISA_MIPS32R2
);
13060 gen_store_srsgpr(rs
, rt
);
13063 goto pool32axf_invalid
;
13066 #ifndef CONFIG_USER_ONLY
13070 mips32_op
= OPC_TLBP
;
13073 mips32_op
= OPC_TLBR
;
13076 mips32_op
= OPC_TLBWI
;
13079 mips32_op
= OPC_TLBWR
;
13082 mips32_op
= OPC_TLBINV
;
13085 mips32_op
= OPC_TLBINVF
;
13088 mips32_op
= OPC_WAIT
;
13091 mips32_op
= OPC_DERET
;
13094 mips32_op
= OPC_ERET
;
13096 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13099 goto pool32axf_invalid
;
13105 check_cp0_enabled(ctx
);
13107 TCGv t0
= tcg_temp_new();
13109 save_cpu_state(ctx
, 1);
13110 gen_helper_di(t0
, cpu_env
);
13111 gen_store_gpr(t0
, rs
);
13112 /* Stop translation as we may have switched the execution mode */
13113 ctx
->bstate
= BS_STOP
;
13118 check_cp0_enabled(ctx
);
13120 TCGv t0
= tcg_temp_new();
13122 save_cpu_state(ctx
, 1);
13123 gen_helper_ei(t0
, cpu_env
);
13124 gen_store_gpr(t0
, rs
);
13125 /* Stop translation as we may have switched the execution mode */
13126 ctx
->bstate
= BS_STOP
;
13131 goto pool32axf_invalid
;
13141 generate_exception_end(ctx
, EXCP_SYSCALL
);
13144 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13145 gen_helper_do_semihosting(cpu_env
);
13147 check_insn(ctx
, ISA_MIPS32
);
13148 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13149 generate_exception_end(ctx
, EXCP_RI
);
13151 generate_exception_end(ctx
, EXCP_DBp
);
13156 goto pool32axf_invalid
;
13160 switch (minor
& 3) {
13162 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13165 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13168 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13171 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13174 goto pool32axf_invalid
;
13178 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13181 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13184 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13187 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13190 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13193 goto pool32axf_invalid
;
13198 MIPS_INVAL("pool32axf");
13199 generate_exception_end(ctx
, EXCP_RI
);
13204 /* Values for microMIPS fmt field. Variable-width, depending on which
13205 formats the instruction supports. */
13224 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13226 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13227 uint32_t mips32_op
;
13229 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13230 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13231 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13233 switch (extension
) {
13234 case FLOAT_1BIT_FMT(CFC1
, 0):
13235 mips32_op
= OPC_CFC1
;
13237 case FLOAT_1BIT_FMT(CTC1
, 0):
13238 mips32_op
= OPC_CTC1
;
13240 case FLOAT_1BIT_FMT(MFC1
, 0):
13241 mips32_op
= OPC_MFC1
;
13243 case FLOAT_1BIT_FMT(MTC1
, 0):
13244 mips32_op
= OPC_MTC1
;
13246 case FLOAT_1BIT_FMT(MFHC1
, 0):
13247 mips32_op
= OPC_MFHC1
;
13249 case FLOAT_1BIT_FMT(MTHC1
, 0):
13250 mips32_op
= OPC_MTHC1
;
13252 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13255 /* Reciprocal square root */
13256 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13257 mips32_op
= OPC_RSQRT_S
;
13259 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13260 mips32_op
= OPC_RSQRT_D
;
13264 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13265 mips32_op
= OPC_SQRT_S
;
13267 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13268 mips32_op
= OPC_SQRT_D
;
13272 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13273 mips32_op
= OPC_RECIP_S
;
13275 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13276 mips32_op
= OPC_RECIP_D
;
13280 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13281 mips32_op
= OPC_FLOOR_L_S
;
13283 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13284 mips32_op
= OPC_FLOOR_L_D
;
13286 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13287 mips32_op
= OPC_FLOOR_W_S
;
13289 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13290 mips32_op
= OPC_FLOOR_W_D
;
13294 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13295 mips32_op
= OPC_CEIL_L_S
;
13297 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13298 mips32_op
= OPC_CEIL_L_D
;
13300 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13301 mips32_op
= OPC_CEIL_W_S
;
13303 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13304 mips32_op
= OPC_CEIL_W_D
;
13308 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13309 mips32_op
= OPC_TRUNC_L_S
;
13311 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13312 mips32_op
= OPC_TRUNC_L_D
;
13314 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13315 mips32_op
= OPC_TRUNC_W_S
;
13317 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13318 mips32_op
= OPC_TRUNC_W_D
;
13322 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13323 mips32_op
= OPC_ROUND_L_S
;
13325 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13326 mips32_op
= OPC_ROUND_L_D
;
13328 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13329 mips32_op
= OPC_ROUND_W_S
;
13331 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13332 mips32_op
= OPC_ROUND_W_D
;
13335 /* Integer to floating-point conversion */
13336 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13337 mips32_op
= OPC_CVT_L_S
;
13339 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13340 mips32_op
= OPC_CVT_L_D
;
13342 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13343 mips32_op
= OPC_CVT_W_S
;
13345 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13346 mips32_op
= OPC_CVT_W_D
;
13349 /* Paired-foo conversions */
13350 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13351 mips32_op
= OPC_CVT_S_PL
;
13353 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13354 mips32_op
= OPC_CVT_S_PU
;
13356 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13357 mips32_op
= OPC_CVT_PW_PS
;
13359 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13360 mips32_op
= OPC_CVT_PS_PW
;
13363 /* Floating-point moves */
13364 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13365 mips32_op
= OPC_MOV_S
;
13367 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13368 mips32_op
= OPC_MOV_D
;
13370 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13371 mips32_op
= OPC_MOV_PS
;
13374 /* Absolute value */
13375 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13376 mips32_op
= OPC_ABS_S
;
13378 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13379 mips32_op
= OPC_ABS_D
;
13381 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13382 mips32_op
= OPC_ABS_PS
;
13386 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13387 mips32_op
= OPC_NEG_S
;
13389 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13390 mips32_op
= OPC_NEG_D
;
13392 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13393 mips32_op
= OPC_NEG_PS
;
13396 /* Reciprocal square root step */
13397 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13398 mips32_op
= OPC_RSQRT1_S
;
13400 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13401 mips32_op
= OPC_RSQRT1_D
;
13403 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13404 mips32_op
= OPC_RSQRT1_PS
;
13407 /* Reciprocal step */
13408 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13409 mips32_op
= OPC_RECIP1_S
;
13411 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13412 mips32_op
= OPC_RECIP1_S
;
13414 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13415 mips32_op
= OPC_RECIP1_PS
;
13418 /* Conversions from double */
13419 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13420 mips32_op
= OPC_CVT_D_S
;
13422 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13423 mips32_op
= OPC_CVT_D_W
;
13425 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13426 mips32_op
= OPC_CVT_D_L
;
13429 /* Conversions from single */
13430 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13431 mips32_op
= OPC_CVT_S_D
;
13433 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13434 mips32_op
= OPC_CVT_S_W
;
13436 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13437 mips32_op
= OPC_CVT_S_L
;
13439 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13442 /* Conditional moves on floating-point codes */
13443 case COND_FLOAT_MOV(MOVT
, 0):
13444 case COND_FLOAT_MOV(MOVT
, 1):
13445 case COND_FLOAT_MOV(MOVT
, 2):
13446 case COND_FLOAT_MOV(MOVT
, 3):
13447 case COND_FLOAT_MOV(MOVT
, 4):
13448 case COND_FLOAT_MOV(MOVT
, 5):
13449 case COND_FLOAT_MOV(MOVT
, 6):
13450 case COND_FLOAT_MOV(MOVT
, 7):
13451 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13452 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13454 case COND_FLOAT_MOV(MOVF
, 0):
13455 case COND_FLOAT_MOV(MOVF
, 1):
13456 case COND_FLOAT_MOV(MOVF
, 2):
13457 case COND_FLOAT_MOV(MOVF
, 3):
13458 case COND_FLOAT_MOV(MOVF
, 4):
13459 case COND_FLOAT_MOV(MOVF
, 5):
13460 case COND_FLOAT_MOV(MOVF
, 6):
13461 case COND_FLOAT_MOV(MOVF
, 7):
13462 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13463 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13466 MIPS_INVAL("pool32fxf");
13467 generate_exception_end(ctx
, EXCP_RI
);
13472 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13476 int rt
, rs
, rd
, rr
;
13478 uint32_t op
, minor
, mips32_op
;
13479 uint32_t cond
, fmt
, cc
;
13481 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13482 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13484 rt
= (ctx
->opcode
>> 21) & 0x1f;
13485 rs
= (ctx
->opcode
>> 16) & 0x1f;
13486 rd
= (ctx
->opcode
>> 11) & 0x1f;
13487 rr
= (ctx
->opcode
>> 6) & 0x1f;
13488 imm
= (int16_t) ctx
->opcode
;
13490 op
= (ctx
->opcode
>> 26) & 0x3f;
13493 minor
= ctx
->opcode
& 0x3f;
13496 minor
= (ctx
->opcode
>> 6) & 0xf;
13499 mips32_op
= OPC_SLL
;
13502 mips32_op
= OPC_SRA
;
13505 mips32_op
= OPC_SRL
;
13508 mips32_op
= OPC_ROTR
;
13510 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13513 check_insn(ctx
, ISA_MIPS32R6
);
13514 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13517 check_insn(ctx
, ISA_MIPS32R6
);
13518 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13521 check_insn(ctx
, ISA_MIPS32R6
);
13522 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13525 goto pool32a_invalid
;
13529 minor
= (ctx
->opcode
>> 6) & 0xf;
13533 mips32_op
= OPC_ADD
;
13536 mips32_op
= OPC_ADDU
;
13539 mips32_op
= OPC_SUB
;
13542 mips32_op
= OPC_SUBU
;
13545 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13546 mips32_op
= OPC_MUL
;
13548 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13552 mips32_op
= OPC_SLLV
;
13555 mips32_op
= OPC_SRLV
;
13558 mips32_op
= OPC_SRAV
;
13561 mips32_op
= OPC_ROTRV
;
13563 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13565 /* Logical operations */
13567 mips32_op
= OPC_AND
;
13570 mips32_op
= OPC_OR
;
13573 mips32_op
= OPC_NOR
;
13576 mips32_op
= OPC_XOR
;
13578 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13580 /* Set less than */
13582 mips32_op
= OPC_SLT
;
13585 mips32_op
= OPC_SLTU
;
13587 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13590 goto pool32a_invalid
;
13594 minor
= (ctx
->opcode
>> 6) & 0xf;
13596 /* Conditional moves */
13597 case MOVN
: /* MUL */
13598 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13600 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13603 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13606 case MOVZ
: /* MUH */
13607 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13609 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13612 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13616 check_insn(ctx
, ISA_MIPS32R6
);
13617 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13620 check_insn(ctx
, ISA_MIPS32R6
);
13621 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13623 case LWXS
: /* DIV */
13624 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13626 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13629 gen_ldxs(ctx
, rs
, rt
, rd
);
13633 check_insn(ctx
, ISA_MIPS32R6
);
13634 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13637 check_insn(ctx
, ISA_MIPS32R6
);
13638 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13641 check_insn(ctx
, ISA_MIPS32R6
);
13642 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13645 goto pool32a_invalid
;
13649 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13652 check_insn(ctx
, ISA_MIPS32R6
);
13653 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13654 extract32(ctx
->opcode
, 9, 2));
13657 check_insn(ctx
, ISA_MIPS32R6
);
13658 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13659 extract32(ctx
->opcode
, 9, 2));
13662 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13665 gen_pool32axf(env
, ctx
, rt
, rs
);
13668 generate_exception_end(ctx
, EXCP_BREAK
);
13671 check_insn(ctx
, ISA_MIPS32R6
);
13672 generate_exception_end(ctx
, EXCP_RI
);
13676 MIPS_INVAL("pool32a");
13677 generate_exception_end(ctx
, EXCP_RI
);
13682 minor
= (ctx
->opcode
>> 12) & 0xf;
13685 check_cp0_enabled(ctx
);
13686 /* Treat as no-op. */
13690 /* COP2: Not implemented. */
13691 generate_exception_err(ctx
, EXCP_CpU
, 2);
13693 #ifdef TARGET_MIPS64
13696 check_insn(ctx
, ISA_MIPS3
);
13697 check_mips_64(ctx
);
13702 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13704 #ifdef TARGET_MIPS64
13707 check_insn(ctx
, ISA_MIPS3
);
13708 check_mips_64(ctx
);
13713 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13716 MIPS_INVAL("pool32b");
13717 generate_exception_end(ctx
, EXCP_RI
);
13722 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13723 minor
= ctx
->opcode
& 0x3f;
13724 check_cp1_enabled(ctx
);
13727 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13728 mips32_op
= OPC_ALNV_PS
;
13731 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13732 mips32_op
= OPC_MADD_S
;
13735 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13736 mips32_op
= OPC_MADD_D
;
13739 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13740 mips32_op
= OPC_MADD_PS
;
13743 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13744 mips32_op
= OPC_MSUB_S
;
13747 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13748 mips32_op
= OPC_MSUB_D
;
13751 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13752 mips32_op
= OPC_MSUB_PS
;
13755 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13756 mips32_op
= OPC_NMADD_S
;
13759 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13760 mips32_op
= OPC_NMADD_D
;
13763 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13764 mips32_op
= OPC_NMADD_PS
;
13767 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13768 mips32_op
= OPC_NMSUB_S
;
13771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13772 mips32_op
= OPC_NMSUB_D
;
13775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13776 mips32_op
= OPC_NMSUB_PS
;
13778 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13780 case CABS_COND_FMT
:
13781 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13782 cond
= (ctx
->opcode
>> 6) & 0xf;
13783 cc
= (ctx
->opcode
>> 13) & 0x7;
13784 fmt
= (ctx
->opcode
>> 10) & 0x3;
13787 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13790 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13793 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13796 goto pool32f_invalid
;
13800 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13801 cond
= (ctx
->opcode
>> 6) & 0xf;
13802 cc
= (ctx
->opcode
>> 13) & 0x7;
13803 fmt
= (ctx
->opcode
>> 10) & 0x3;
13806 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13809 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13812 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13815 goto pool32f_invalid
;
13819 check_insn(ctx
, ISA_MIPS32R6
);
13820 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13823 check_insn(ctx
, ISA_MIPS32R6
);
13824 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13827 gen_pool32fxf(ctx
, rt
, rs
);
13831 switch ((ctx
->opcode
>> 6) & 0x7) {
13833 mips32_op
= OPC_PLL_PS
;
13836 mips32_op
= OPC_PLU_PS
;
13839 mips32_op
= OPC_PUL_PS
;
13842 mips32_op
= OPC_PUU_PS
;
13845 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13846 mips32_op
= OPC_CVT_PS_S
;
13848 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13851 goto pool32f_invalid
;
13855 check_insn(ctx
, ISA_MIPS32R6
);
13856 switch ((ctx
->opcode
>> 9) & 0x3) {
13858 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13861 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13864 goto pool32f_invalid
;
13869 switch ((ctx
->opcode
>> 6) & 0x7) {
13871 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13872 mips32_op
= OPC_LWXC1
;
13875 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13876 mips32_op
= OPC_SWXC1
;
13879 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13880 mips32_op
= OPC_LDXC1
;
13883 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13884 mips32_op
= OPC_SDXC1
;
13887 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13888 mips32_op
= OPC_LUXC1
;
13891 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13892 mips32_op
= OPC_SUXC1
;
13894 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13897 goto pool32f_invalid
;
13901 check_insn(ctx
, ISA_MIPS32R6
);
13902 switch ((ctx
->opcode
>> 9) & 0x3) {
13904 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13907 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13910 goto pool32f_invalid
;
13915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13916 fmt
= (ctx
->opcode
>> 9) & 0x3;
13917 switch ((ctx
->opcode
>> 6) & 0x7) {
13921 mips32_op
= OPC_RSQRT2_S
;
13924 mips32_op
= OPC_RSQRT2_D
;
13927 mips32_op
= OPC_RSQRT2_PS
;
13930 goto pool32f_invalid
;
13936 mips32_op
= OPC_RECIP2_S
;
13939 mips32_op
= OPC_RECIP2_D
;
13942 mips32_op
= OPC_RECIP2_PS
;
13945 goto pool32f_invalid
;
13949 mips32_op
= OPC_ADDR_PS
;
13952 mips32_op
= OPC_MULR_PS
;
13954 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13957 goto pool32f_invalid
;
13961 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13962 cc
= (ctx
->opcode
>> 13) & 0x7;
13963 fmt
= (ctx
->opcode
>> 9) & 0x3;
13964 switch ((ctx
->opcode
>> 6) & 0x7) {
13965 case MOVF_FMT
: /* RINT_FMT */
13966 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13970 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13973 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13976 goto pool32f_invalid
;
13982 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13985 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13989 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13992 goto pool32f_invalid
;
13996 case MOVT_FMT
: /* CLASS_FMT */
13997 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14001 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14004 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14007 goto pool32f_invalid
;
14013 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14016 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14020 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14023 goto pool32f_invalid
;
14028 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14031 goto pool32f_invalid
;
14034 #define FINSN_3ARG_SDPS(prfx) \
14035 switch ((ctx->opcode >> 8) & 0x3) { \
14037 mips32_op = OPC_##prfx##_S; \
14040 mips32_op = OPC_##prfx##_D; \
14042 case FMT_SDPS_PS: \
14044 mips32_op = OPC_##prfx##_PS; \
14047 goto pool32f_invalid; \
14050 check_insn(ctx
, ISA_MIPS32R6
);
14051 switch ((ctx
->opcode
>> 9) & 0x3) {
14053 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14056 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14059 goto pool32f_invalid
;
14063 check_insn(ctx
, ISA_MIPS32R6
);
14064 switch ((ctx
->opcode
>> 9) & 0x3) {
14066 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14069 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14072 goto pool32f_invalid
;
14076 /* regular FP ops */
14077 switch ((ctx
->opcode
>> 6) & 0x3) {
14079 FINSN_3ARG_SDPS(ADD
);
14082 FINSN_3ARG_SDPS(SUB
);
14085 FINSN_3ARG_SDPS(MUL
);
14088 fmt
= (ctx
->opcode
>> 8) & 0x3;
14090 mips32_op
= OPC_DIV_D
;
14091 } else if (fmt
== 0) {
14092 mips32_op
= OPC_DIV_S
;
14094 goto pool32f_invalid
;
14098 goto pool32f_invalid
;
14103 switch ((ctx
->opcode
>> 6) & 0x7) {
14104 case MOVN_FMT
: /* SELNEZ_FMT */
14105 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14107 switch ((ctx
->opcode
>> 9) & 0x3) {
14109 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14112 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14115 goto pool32f_invalid
;
14119 FINSN_3ARG_SDPS(MOVN
);
14123 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14124 FINSN_3ARG_SDPS(MOVN
);
14126 case MOVZ_FMT
: /* SELEQZ_FMT */
14127 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14129 switch ((ctx
->opcode
>> 9) & 0x3) {
14131 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14134 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14137 goto pool32f_invalid
;
14141 FINSN_3ARG_SDPS(MOVZ
);
14145 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14146 FINSN_3ARG_SDPS(MOVZ
);
14149 check_insn(ctx
, ISA_MIPS32R6
);
14150 switch ((ctx
->opcode
>> 9) & 0x3) {
14152 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14155 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14158 goto pool32f_invalid
;
14162 check_insn(ctx
, ISA_MIPS32R6
);
14163 switch ((ctx
->opcode
>> 9) & 0x3) {
14165 mips32_op
= OPC_MADDF_S
;
14168 mips32_op
= OPC_MADDF_D
;
14171 goto pool32f_invalid
;
14175 check_insn(ctx
, ISA_MIPS32R6
);
14176 switch ((ctx
->opcode
>> 9) & 0x3) {
14178 mips32_op
= OPC_MSUBF_S
;
14181 mips32_op
= OPC_MSUBF_D
;
14184 goto pool32f_invalid
;
14188 goto pool32f_invalid
;
14192 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14196 MIPS_INVAL("pool32f");
14197 generate_exception_end(ctx
, EXCP_RI
);
14201 generate_exception_err(ctx
, EXCP_CpU
, 1);
14205 minor
= (ctx
->opcode
>> 21) & 0x1f;
14208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14209 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14213 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14214 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14217 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14218 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14219 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14222 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14223 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14226 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14227 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14228 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14232 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14233 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14236 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14237 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14240 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14241 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14245 case TLTI
: /* BC1EQZC */
14246 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14248 check_cp1_enabled(ctx
);
14249 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14252 mips32_op
= OPC_TLTI
;
14256 case TGEI
: /* BC1NEZC */
14257 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14259 check_cp1_enabled(ctx
);
14260 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14263 mips32_op
= OPC_TGEI
;
14268 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14269 mips32_op
= OPC_TLTIU
;
14272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14273 mips32_op
= OPC_TGEIU
;
14275 case TNEI
: /* SYNCI */
14276 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14278 /* Break the TB to be able to sync copied instructions
14280 ctx
->bstate
= BS_STOP
;
14283 mips32_op
= OPC_TNEI
;
14288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14289 mips32_op
= OPC_TEQI
;
14291 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14297 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14298 4, rs
, 0, imm
<< 1, 0);
14299 /* Compact branches don't have a delay slot, so just let
14300 the normal delay slot handling take us to the branch
14304 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14305 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14308 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14309 /* Break the TB to be able to sync copied instructions
14311 ctx
->bstate
= BS_STOP
;
14315 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14316 /* COP2: Not implemented. */
14317 generate_exception_err(ctx
, EXCP_CpU
, 2);
14320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14321 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14325 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14329 mips32_op
= OPC_BC1FANY4
;
14332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14333 mips32_op
= OPC_BC1TANY4
;
14336 check_insn(ctx
, ASE_MIPS3D
);
14339 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14340 check_cp1_enabled(ctx
);
14341 gen_compute_branch1(ctx
, mips32_op
,
14342 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14344 generate_exception_err(ctx
, EXCP_CpU
, 1);
14349 /* MIPS DSP: not implemented */
14352 MIPS_INVAL("pool32i");
14353 generate_exception_end(ctx
, EXCP_RI
);
14358 minor
= (ctx
->opcode
>> 12) & 0xf;
14359 offset
= sextract32(ctx
->opcode
, 0,
14360 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14363 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14364 mips32_op
= OPC_LWL
;
14367 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14368 mips32_op
= OPC_SWL
;
14371 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14372 mips32_op
= OPC_LWR
;
14375 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14376 mips32_op
= OPC_SWR
;
14378 #if defined(TARGET_MIPS64)
14380 check_insn(ctx
, ISA_MIPS3
);
14381 check_mips_64(ctx
);
14382 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14383 mips32_op
= OPC_LDL
;
14386 check_insn(ctx
, ISA_MIPS3
);
14387 check_mips_64(ctx
);
14388 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14389 mips32_op
= OPC_SDL
;
14392 check_insn(ctx
, ISA_MIPS3
);
14393 check_mips_64(ctx
);
14394 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14395 mips32_op
= OPC_LDR
;
14398 check_insn(ctx
, ISA_MIPS3
);
14399 check_mips_64(ctx
);
14400 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14401 mips32_op
= OPC_SDR
;
14404 check_insn(ctx
, ISA_MIPS3
);
14405 check_mips_64(ctx
);
14406 mips32_op
= OPC_LWU
;
14409 check_insn(ctx
, ISA_MIPS3
);
14410 check_mips_64(ctx
);
14411 mips32_op
= OPC_LLD
;
14415 mips32_op
= OPC_LL
;
14418 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14421 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14424 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14426 #if defined(TARGET_MIPS64)
14428 check_insn(ctx
, ISA_MIPS3
);
14429 check_mips_64(ctx
);
14430 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14434 /* Treat as no-op */
14435 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14436 /* hint codes 24-31 are reserved and signal RI */
14437 generate_exception(ctx
, EXCP_RI
);
14441 MIPS_INVAL("pool32c");
14442 generate_exception_end(ctx
, EXCP_RI
);
14446 case ADDI32
: /* AUI, LUI */
14447 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14449 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14452 mips32_op
= OPC_ADDI
;
14457 mips32_op
= OPC_ADDIU
;
14459 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14462 /* Logical operations */
14464 mips32_op
= OPC_ORI
;
14467 mips32_op
= OPC_XORI
;
14470 mips32_op
= OPC_ANDI
;
14472 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14475 /* Set less than immediate */
14477 mips32_op
= OPC_SLTI
;
14480 mips32_op
= OPC_SLTIU
;
14482 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14485 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14486 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14487 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14488 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14490 case JALS32
: /* BOVC, BEQC, BEQZALC */
14491 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14494 mips32_op
= OPC_BOVC
;
14495 } else if (rs
< rt
&& rs
== 0) {
14497 mips32_op
= OPC_BEQZALC
;
14500 mips32_op
= OPC_BEQC
;
14502 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14505 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14506 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14507 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14510 case BEQ32
: /* BC */
14511 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14513 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14514 sextract32(ctx
->opcode
<< 1, 0, 27));
14517 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14520 case BNE32
: /* BALC */
14521 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14523 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14524 sextract32(ctx
->opcode
<< 1, 0, 27));
14527 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14530 case J32
: /* BGTZC, BLTZC, BLTC */
14531 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14532 if (rs
== 0 && rt
!= 0) {
14534 mips32_op
= OPC_BGTZC
;
14535 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14537 mips32_op
= OPC_BLTZC
;
14540 mips32_op
= OPC_BLTC
;
14542 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14545 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14546 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14549 case JAL32
: /* BLEZC, BGEZC, BGEC */
14550 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14551 if (rs
== 0 && rt
!= 0) {
14553 mips32_op
= OPC_BLEZC
;
14554 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14556 mips32_op
= OPC_BGEZC
;
14559 mips32_op
= OPC_BGEC
;
14561 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14564 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14565 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14566 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14569 /* Floating point (COP1) */
14571 mips32_op
= OPC_LWC1
;
14574 mips32_op
= OPC_LDC1
;
14577 mips32_op
= OPC_SWC1
;
14580 mips32_op
= OPC_SDC1
;
14582 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14584 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14585 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14586 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14587 switch ((ctx
->opcode
>> 16) & 0x1f) {
14588 case ADDIUPC_00
... ADDIUPC_07
:
14589 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14592 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14595 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14597 case LWPC_08
... LWPC_0F
:
14598 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14601 generate_exception(ctx
, EXCP_RI
);
14606 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14607 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14609 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14612 case BNVC
: /* BNEC, BNEZALC */
14613 check_insn(ctx
, ISA_MIPS32R6
);
14616 mips32_op
= OPC_BNVC
;
14617 } else if (rs
< rt
&& rs
== 0) {
14619 mips32_op
= OPC_BNEZALC
;
14622 mips32_op
= OPC_BNEC
;
14624 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14626 case R6_BNEZC
: /* JIALC */
14627 check_insn(ctx
, ISA_MIPS32R6
);
14630 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14631 sextract32(ctx
->opcode
<< 1, 0, 22));
14634 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14637 case R6_BEQZC
: /* JIC */
14638 check_insn(ctx
, ISA_MIPS32R6
);
14641 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14642 sextract32(ctx
->opcode
<< 1, 0, 22));
14645 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14648 case BLEZALC
: /* BGEZALC, BGEUC */
14649 check_insn(ctx
, ISA_MIPS32R6
);
14650 if (rs
== 0 && rt
!= 0) {
14652 mips32_op
= OPC_BLEZALC
;
14653 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14655 mips32_op
= OPC_BGEZALC
;
14658 mips32_op
= OPC_BGEUC
;
14660 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14662 case BGTZALC
: /* BLTZALC, BLTUC */
14663 check_insn(ctx
, ISA_MIPS32R6
);
14664 if (rs
== 0 && rt
!= 0) {
14666 mips32_op
= OPC_BGTZALC
;
14667 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14669 mips32_op
= OPC_BLTZALC
;
14672 mips32_op
= OPC_BLTUC
;
14674 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14676 /* Loads and stores */
14678 mips32_op
= OPC_LB
;
14681 mips32_op
= OPC_LBU
;
14684 mips32_op
= OPC_LH
;
14687 mips32_op
= OPC_LHU
;
14690 mips32_op
= OPC_LW
;
14692 #ifdef TARGET_MIPS64
14694 check_insn(ctx
, ISA_MIPS3
);
14695 check_mips_64(ctx
);
14696 mips32_op
= OPC_LD
;
14699 check_insn(ctx
, ISA_MIPS3
);
14700 check_mips_64(ctx
);
14701 mips32_op
= OPC_SD
;
14705 mips32_op
= OPC_SB
;
14708 mips32_op
= OPC_SH
;
14711 mips32_op
= OPC_SW
;
14714 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14717 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14720 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_micromips_opc: decode one 16-bit microMIPS instruction (or
 * dispatch to decode_micromips32_opc for 32-bit encodings).
 *
 * NOTE(review): this text is a lossy, line-shredded extraction — the
 * leading decimal on many lines is the original source line number, and
 * numerous case labels, break statements and braces were dropped.  The
 * tokens below are preserved byte-for-byte; only comments were added.
 * Confirm structure against upstream QEMU target-mips/translate.c.
 */
14725 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
/* Misaligned PC: record the bad address and raise an address-error
 * (AdEL) exception instead of decoding. */
14729 /* make sure instructions are on a halfword boundary */
14730 if (ctx
->pc
& 0x1) {
14731 env
->CP0_BadVAddr
= ctx
->pc
;
14732 generate_exception_end(ctx
, EXCP_AdEL
);
/* Major opcode is bits 15..10 of the halfword. */
14736 op
= (ctx
->opcode
>> 10) & 0x3f;
/* In a strict branch-delay slot, a 16-bit insn where a 32-bit one is
 * required (or vice versa) is a Reserved Instruction. */
14737 /* Enforce properly-sized instructions in a delay slot */
14738 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
14739 switch (op
& 0x7) { /* MSB-3..MSB-5 */
14741 /* POOL32A, POOL32B, POOL32I, POOL32C */
14743 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
14745 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
14747 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
14749 /* LB32, LH32, LWC132, LDC132, LW32 */
14750 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
14751 generate_exception_end(ctx
, EXCP_RI
);
14756 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
14758 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
14760 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
14761 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
14762 generate_exception_end(ctx
, EXCP_RI
);
/* POOL16A (presumably — case label dropped by extraction): 3-operand
 * arithmetic on the mapped 3-bit register set. */
14772 int rd
= mmreg(uMIPS_RD(ctx
->opcode
))
;
14773 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14774 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14777 switch (ctx
->opcode
& 0x1) {
/* R6 swapped the destination/source field positions, so operand order
 * to gen_arith differs by ISA revision. */
14785 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14786 /* In the Release 6 the register number location in
14787 * the instruction encoding has changed.
14789 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14791 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
/* POOL16B (presumably — label dropped): shift-immediate; a 0 amount
 * field encodes a shift of 8. */
14797 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14798 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14799 int amount
= (ctx
->opcode
>> 1) & 0x7;
14801 amount
= amount
== 0 ? 8 : amount
;
14803 switch (ctx
->opcode
& 0x1) {
14812 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
/* POOL16C: different sub-encoding table for R6 vs pre-R6. */
14816 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14817 gen_pool16c_r6_insn(ctx
);
14819 gen_pool16c_insn(ctx
);
/* LWGP16: load word, GP-relative (base register 28). */
14824 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14825 int rb
= 28; /* GP */
14826 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14828 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
/* POOL16F/MOVEP (presumably — label dropped): removed in R6; odd
 * encodings are reserved. */
14832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14833 if (ctx
->opcode
& 1) {
14834 generate_exception_end(ctx
, EXCP_RI
);
14837 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14838 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14839 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14840 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
/* LBU16: 4-bit zero-extended offset, with 0xf encoding -1. */
14845 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14846 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14847 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14848 offset
= (offset
== 0xf ? -1 : offset
);
14850 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
/* LHU16: halfword-scaled 4-bit offset. */
14855 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14856 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14857 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14859 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
/* LWSP16: load word, SP-relative (base register 29), 5-bit field. */
14864 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14865 int rb
= 29; /* SP */
14866 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14868 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
/* LW16: word-scaled 4-bit offset. */
14873 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14874 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14875 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14877 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
/* SB16: store byte (mmreg2 maps the store source register set). */
14882 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14883 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14884 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14886 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
/* SH16: store halfword. */
14891 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14892 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14893 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14895 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
/* SWSP16: store word, SP-relative. */
14900 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14901 int rb
= 29; /* SP */
14902 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14904 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
/* SW16: store word. */
14909 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14910 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14911 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14913 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
/* MOVE16 (presumably): implemented as ADDU rd, rs, $0. */
14918 int rd
= uMIPS_RD5(ctx
->opcode
);
14919 int rs
= uMIPS_RS5(ctx
->opcode
);
14921 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
/* POOL16D / POOL16E sub-dispatch (interior cases dropped by the
 * extraction). */
14928 switch (ctx
->opcode
& 0x1) {
14938 switch (ctx
->opcode
& 0x1) {
14943 gen_addiur1sp(ctx
);
/* Unconditional 16-bit branch; R6 variant (BC16) takes no delay slot
 * (final argument 0 vs 4). */
14947 case B16
: /* BC16 */
14948 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14949 sextract32(ctx
->opcode
, 0, 10) << 1,
14950 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14952 case BNEZ16
: /* BNEZC16 */
14953 case BEQZ16
: /* BEQZC16 */
14954 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14955 mmreg(uMIPS_RD(ctx
->opcode
)),
14956 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14957 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
/* LI16: 7-bit immediate, with 0x7f encoding -1. */
14962 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
14963 int imm
= ZIMM(ctx
->opcode
, 0, 7);
14965 imm
= (imm
== 0x7f ? -1 : imm
);
14966 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
/* Unknown 16-bit major opcode: Reserved Instruction. */
14972 generate_exception_end(ctx
, EXCP_RI
);
/* 32-bit encodings are handled by the dedicated decoder. */
14975 decode_micromips32_opc(env
, ctx
);
14982 /* SmartMIPS extension to MIPS32 */
14984 #if defined(TARGET_MIPS64)
14986 /* MDMX extension to MIPS64 */
14990 /* MIPSDSP functions. */
/*
 * gen_mipsdsp_ld: emit TCG for a MIPS DSP indexed load.
 *
 * Effective address = GPR[base] + GPR[offset], computed into temp t0
 * (with shortcuts when either register index is 0, since GPR 0 reads
 * as zero); the loaded value is written back to GPR rd.
 *
 * NOTE(review): lossy extraction — the switch (opc) statement and its
 * case labels (presumably OPC_LBUX / OPC_LHX / OPC_LWX / OPC_LDX —
 * confirm upstream) were dropped; only the per-width load bodies
 * remain (MO_UB, MO_TESW, MO_TESL, and MO_TEQ under TARGET_MIPS64).
 */
14991 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
14992 int rd
, int base
, int offset
)
14997 t0
= tcg_temp_new();
/* base == 0 (guard line dropped): address is just GPR[offset]. */
15000 gen_load_gpr(t0
, offset
);
15001 } else if (offset
== 0) {
15002 gen_load_gpr(t0
, base
);
15004 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
/* Unsigned byte load. */
15009 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15010 gen_store_gpr(t0
, rd
);
/* Sign-extended halfword load. */
15013 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15014 gen_store_gpr(t0
, rd
);
/* Sign-extended word load. */
15017 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15018 gen_store_gpr(t0
, rd
);
/* Doubleword load, 64-bit targets only. */
15020 #if defined(TARGET_MIPS64)
15022 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15023 gen_store_gpr(t0
, rd
);
/*
 * gen_mipsdsp_arith: emit TCG for MIPS DSP-ASE arithmetic and
 * precision (PRECEQ/PRECR/...) instructions.
 *
 * Dispatches on the op1/op2 opcode classes and calls the matching
 * gen_helper_* with GPR values loaded into temps v1_t/v2_t; the result
 * is written to cpu_gpr[ret].  Helpers taking cpu_env can update DSP
 * control state (e.g. overflow flags) — confirm per-helper upstream.
 *
 * NOTE(review): lossy extraction — the outer switch statements, most
 * case labels, break statements and the closing braces were dropped;
 * tokens below are preserved byte-for-byte, comments only added.
 */
15030 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15031 int ret
, int v1
, int v2
)
/* Guard dropped by extraction (likely `if (ret == 0)`): writing GPR 0
 * is a no-op, so the insn is treated as a NOP — confirm upstream. */
15037 /* Treat as NOP. */
15041 v1_t
= tcg_temp_new();
15042 v2_t
= tcg_temp_new();
15044 gen_load_gpr(v1_t
, v1
);
15045 gen_load_gpr(v2_t
, v2
);
/* ADDUH.QB subclass (shares encoding with MULT.G on other cores). */
15048 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15049 case OPC_MULT_G_2E
:
15053 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15055 case OPC_ADDUH_R_QB
:
15056 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15059 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15061 case OPC_ADDQH_R_PH
:
15062 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15065 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15067 case OPC_ADDQH_R_W
:
15068 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15071 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15073 case OPC_SUBUH_R_QB
:
15074 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15077 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15079 case OPC_SUBQH_R_PH
:
15080 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15083 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15085 case OPC_SUBQH_R_W
:
15086 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
/* ABSQ_S.PH subclass: absolute-value and precision-expand ops; these
 * use only the second source operand (v2_t). */
15090 case OPC_ABSQ_S_PH_DSP
:
15092 case OPC_ABSQ_S_QB
:
15094 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15096 case OPC_ABSQ_S_PH
:
15098 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15102 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
/* PRECEQ.W.PHL: keep the high halfword, then sign-extend to 32 bits
 * (inline TCG, no helper needed). */
15104 case OPC_PRECEQ_W_PHL
:
15106 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15107 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
/* PRECEQ.W.PHR: move the low halfword into the high half, then
 * sign-extend. */
15109 case OPC_PRECEQ_W_PHR
:
15111 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15112 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15113 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15115 case OPC_PRECEQU_PH_QBL
:
15117 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15119 case OPC_PRECEQU_PH_QBR
:
15121 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15123 case OPC_PRECEQU_PH_QBLA
:
15125 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15127 case OPC_PRECEQU_PH_QBRA
:
15129 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15131 case OPC_PRECEU_PH_QBL
:
15133 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15135 case OPC_PRECEU_PH_QBR
:
15137 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15139 case OPC_PRECEU_PH_QBLA
:
15141 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15143 case OPC_PRECEU_PH_QBRA
:
15145 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
/* ADDU.QB subclass: two-operand saturating/non-saturating adds,
 * subtracts and related ops; helpers take cpu_env. */
15149 case OPC_ADDU_QB_DSP
:
15153 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15155 case OPC_ADDQ_S_PH
:
15157 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15161 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15165 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15167 case OPC_ADDU_S_QB
:
15169 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15173 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15175 case OPC_ADDU_S_PH
:
15177 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15181 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15183 case OPC_SUBQ_S_PH
:
15185 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15189 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15193 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15195 case OPC_SUBU_S_QB
:
15197 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15201 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15203 case OPC_SUBU_S_PH
:
15205 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15209 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15213 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15217 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15219 case OPC_RADDU_W_QB
:
15221 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
/* CMPU.EQ.QB subclass: precision-reduce (PRECR*/PRECRQ*) ops. */
15225 case OPC_CMPU_EQ_QB_DSP
:
15227 case OPC_PRECR_QB_PH
:
15229 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15231 case OPC_PRECRQ_QB_PH
:
15233 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
/* PRECR_SRA.PH.W: the shift amount is the immediate v2, materialized
 * as an i32 constant for the helper (helper call split by the lossy
 * extraction — trailing argument lines are missing). */
15235 case OPC_PRECR_SRA_PH_W
:
15238 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15239 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15241 tcg_temp_free_i32(sa_t
);
15244 case OPC_PRECR_SRA_R_PH_W
:
15247 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15248 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15250 tcg_temp_free_i32(sa_t
);
15253 case OPC_PRECRQ_PH_W
:
15255 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15257 case OPC_PRECRQ_RS_PH_W
:
15259 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15261 case OPC_PRECRQU_S_QB_PH
:
15263 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* 64-bit DSP (quad-halfword / paired-word / octal-byte) variants. */
15267 #ifdef TARGET_MIPS64
15268 case OPC_ABSQ_S_QH_DSP
:
15270 case OPC_PRECEQ_L_PWL
:
15272 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15274 case OPC_PRECEQ_L_PWR
:
15276 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15278 case OPC_PRECEQ_PW_QHL
:
15280 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15282 case OPC_PRECEQ_PW_QHR
:
15284 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15286 case OPC_PRECEQ_PW_QHLA
:
15288 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15290 case OPC_PRECEQ_PW_QHRA
:
15292 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15294 case OPC_PRECEQU_QH_OBL
:
15296 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15298 case OPC_PRECEQU_QH_OBR
:
15300 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15302 case OPC_PRECEQU_QH_OBLA
:
15304 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15306 case OPC_PRECEQU_QH_OBRA
:
15308 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15310 case OPC_PRECEU_QH_OBL
:
15312 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15314 case OPC_PRECEU_QH_OBR
:
15316 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15318 case OPC_PRECEU_QH_OBLA
:
15320 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15322 case OPC_PRECEU_QH_OBRA
:
15324 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15326 case OPC_ABSQ_S_OB
:
15328 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15330 case OPC_ABSQ_S_PW
:
15332 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15334 case OPC_ABSQ_S_QH
:
15336 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
/* ADDU.OB subclass: 64-bit add/sub family. */
15340 case OPC_ADDU_OB_DSP
:
15342 case OPC_RADDU_L_OB
:
15344 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15348 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15350 case OPC_SUBQ_S_PW
:
15352 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15358 case OPC_SUBQ_S_QH
:
15360 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15364 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15366 case OPC_SUBU_S_OB
:
15368 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15372 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15374 case OPC_SUBU_S_QH
:
15376 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15380 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15382 case OPC_SUBUH_R_OB
:
15384 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15388 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15390 case OPC_ADDQ_S_PW
:
15392 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15396 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15398 case OPC_ADDQ_S_QH
:
15400 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15404 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15406 case OPC_ADDU_S_OB
:
15408 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15412 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15414 case OPC_ADDU_S_QH
:
15416 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15420 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15422 case OPC_ADDUH_R_OB
:
15424 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
/* CMPU.EQ.OB subclass: 64-bit precision-reduce ops. */
15428 case OPC_CMPU_EQ_OB_DSP
:
15430 case OPC_PRECR_OB_QH
:
15432 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
/* PRECR_SRA.QH.PW: here the destination register number `ret` doubles
 * as the shift-amount field and the helper writes through v2_t
 * in-place — unusual but matches the visible call shape; confirm
 * against upstream and the DSP module spec. */
15434 case OPC_PRECR_SRA_QH_PW
:
15437 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15438 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15439 tcg_temp_free_i32(ret_t
);
15442 case OPC_PRECR_SRA_R_QH_PW
:
15445 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15446 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15447 tcg_temp_free_i32(sa_v
);
15450 case OPC_PRECRQ_OB_QH
:
15452 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15454 case OPC_PRECRQ_PW_L
:
15456 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15458 case OPC_PRECRQ_QH_PW
:
15460 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15462 case OPC_PRECRQ_RS_QH_PW
:
15464 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15466 case OPC_PRECRQU_S_OB_QH
:
15468 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Release the source temporaries. */
15475 tcg_temp_free(v1_t
);
15476 tcg_temp_free(v2_t
);
15479 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15480 int ret
, int v1
, int v2
)
15488 /* Treat as NOP. */
15492 t0
= tcg_temp_new();
15493 v1_t
= tcg_temp_new();
15494 v2_t
= tcg_temp_new();
15496 tcg_gen_movi_tl(t0
, v1
);
15497 gen_load_gpr(v1_t
, v1
);
15498 gen_load_gpr(v2_t
, v2
);
15501 case OPC_SHLL_QB_DSP
:
15503 op2
= MASK_SHLL_QB(ctx
->opcode
);
15507 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15511 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15515 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15519 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15521 case OPC_SHLL_S_PH
:
15523 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15525 case OPC_SHLLV_S_PH
:
15527 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15531 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15533 case OPC_SHLLV_S_W
:
15535 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15539 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15543 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15547 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15551 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15555 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15557 case OPC_SHRA_R_QB
:
15559 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15563 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15565 case OPC_SHRAV_R_QB
:
15567 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15571 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15573 case OPC_SHRA_R_PH
:
15575 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15579 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15581 case OPC_SHRAV_R_PH
:
15583 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15587 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15589 case OPC_SHRAV_R_W
:
15591 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15593 default: /* Invalid */
15594 MIPS_INVAL("MASK SHLL.QB");
15595 generate_exception_end(ctx
, EXCP_RI
);
15600 #ifdef TARGET_MIPS64
15601 case OPC_SHLL_OB_DSP
:
15602 op2
= MASK_SHLL_OB(ctx
->opcode
);
15606 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15610 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15612 case OPC_SHLL_S_PW
:
15614 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15616 case OPC_SHLLV_S_PW
:
15618 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15622 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15626 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15630 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15634 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15636 case OPC_SHLL_S_QH
:
15638 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15640 case OPC_SHLLV_S_QH
:
15642 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15646 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15650 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15652 case OPC_SHRA_R_OB
:
15654 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15656 case OPC_SHRAV_R_OB
:
15658 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15662 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15666 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15668 case OPC_SHRA_R_PW
:
15670 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15672 case OPC_SHRAV_R_PW
:
15674 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15678 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15682 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15684 case OPC_SHRA_R_QH
:
15686 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15688 case OPC_SHRAV_R_QH
:
15690 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15694 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15698 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15702 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15706 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15708 default: /* Invalid */
15709 MIPS_INVAL("MASK SHLL.OB");
15710 generate_exception_end(ctx
, EXCP_RI
);
15718 tcg_temp_free(v1_t
);
15719 tcg_temp_free(v2_t
);
15722 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15723 int ret
, int v1
, int v2
, int check_ret
)
15729 if ((ret
== 0) && (check_ret
== 1)) {
15730 /* Treat as NOP. */
15734 t0
= tcg_temp_new_i32();
15735 v1_t
= tcg_temp_new();
15736 v2_t
= tcg_temp_new();
15738 tcg_gen_movi_i32(t0
, ret
);
15739 gen_load_gpr(v1_t
, v1
);
15740 gen_load_gpr(v2_t
, v2
);
15743 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15744 * the same mask and op1. */
15745 case OPC_MULT_G_2E
:
15749 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15752 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15755 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15757 case OPC_MULQ_RS_W
:
15758 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15762 case OPC_DPA_W_PH_DSP
:
15764 case OPC_DPAU_H_QBL
:
15766 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15768 case OPC_DPAU_H_QBR
:
15770 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15772 case OPC_DPSU_H_QBL
:
15774 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15776 case OPC_DPSU_H_QBR
:
15778 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15782 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15784 case OPC_DPAX_W_PH
:
15786 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15788 case OPC_DPAQ_S_W_PH
:
15790 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15792 case OPC_DPAQX_S_W_PH
:
15794 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15796 case OPC_DPAQX_SA_W_PH
:
15798 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15802 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15804 case OPC_DPSX_W_PH
:
15806 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15808 case OPC_DPSQ_S_W_PH
:
15810 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15812 case OPC_DPSQX_S_W_PH
:
15814 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15816 case OPC_DPSQX_SA_W_PH
:
15818 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15820 case OPC_MULSAQ_S_W_PH
:
15822 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15824 case OPC_DPAQ_SA_L_W
:
15826 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15828 case OPC_DPSQ_SA_L_W
:
15830 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15832 case OPC_MAQ_S_W_PHL
:
15834 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15836 case OPC_MAQ_S_W_PHR
:
15838 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15840 case OPC_MAQ_SA_W_PHL
:
15842 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15844 case OPC_MAQ_SA_W_PHR
:
15846 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15848 case OPC_MULSA_W_PH
:
15850 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15854 #ifdef TARGET_MIPS64
15855 case OPC_DPAQ_W_QH_DSP
:
15857 int ac
= ret
& 0x03;
15858 tcg_gen_movi_i32(t0
, ac
);
15863 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15867 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15871 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15875 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15879 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15881 case OPC_DPAQ_S_W_QH
:
15883 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15885 case OPC_DPAQ_SA_L_PW
:
15887 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15889 case OPC_DPAU_H_OBL
:
15891 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15893 case OPC_DPAU_H_OBR
:
15895 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15899 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15901 case OPC_DPSQ_S_W_QH
:
15903 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15905 case OPC_DPSQ_SA_L_PW
:
15907 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15909 case OPC_DPSU_H_OBL
:
15911 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15913 case OPC_DPSU_H_OBR
:
15915 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15917 case OPC_MAQ_S_L_PWL
:
15919 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15921 case OPC_MAQ_S_L_PWR
:
15923 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15925 case OPC_MAQ_S_W_QHLL
:
15927 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15929 case OPC_MAQ_SA_W_QHLL
:
15931 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15933 case OPC_MAQ_S_W_QHLR
:
15935 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15937 case OPC_MAQ_SA_W_QHLR
:
15939 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15941 case OPC_MAQ_S_W_QHRL
:
15943 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15945 case OPC_MAQ_SA_W_QHRL
:
15947 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15949 case OPC_MAQ_S_W_QHRR
:
15951 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15953 case OPC_MAQ_SA_W_QHRR
:
15955 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15957 case OPC_MULSAQ_S_L_PW
:
15959 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15961 case OPC_MULSAQ_S_W_QH
:
15963 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15969 case OPC_ADDU_QB_DSP
:
15971 case OPC_MULEU_S_PH_QBL
:
15973 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15975 case OPC_MULEU_S_PH_QBR
:
15977 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15979 case OPC_MULQ_RS_PH
:
15981 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15983 case OPC_MULEQ_S_W_PHL
:
15985 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15987 case OPC_MULEQ_S_W_PHR
:
15989 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15991 case OPC_MULQ_S_PH
:
15993 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15997 #ifdef TARGET_MIPS64
15998 case OPC_ADDU_OB_DSP
:
16000 case OPC_MULEQ_S_PW_QHL
:
16002 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16004 case OPC_MULEQ_S_PW_QHR
:
16006 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16008 case OPC_MULEU_S_QH_OBL
:
16010 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16012 case OPC_MULEU_S_QH_OBR
:
16014 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16016 case OPC_MULQ_RS_QH
:
16018 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16025 tcg_temp_free_i32(t0
);
16026 tcg_temp_free(v1_t
);
16027 tcg_temp_free(v2_t
);
16030 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16038 /* Treat as NOP. */
16042 t0
= tcg_temp_new();
16043 val_t
= tcg_temp_new();
16044 gen_load_gpr(val_t
, val
);
16047 case OPC_ABSQ_S_PH_DSP
:
16051 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16056 target_long result
;
16057 imm
= (ctx
->opcode
>> 16) & 0xFF;
16058 result
= (uint32_t)imm
<< 24 |
16059 (uint32_t)imm
<< 16 |
16060 (uint32_t)imm
<< 8 |
16062 result
= (int32_t)result
;
16063 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16068 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16069 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16070 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16071 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16072 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16073 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16078 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16079 imm
= (int16_t)(imm
<< 6) >> 6;
16080 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16081 (target_long
)((int32_t)imm
<< 16 | \
16087 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16088 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16089 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16090 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16094 #ifdef TARGET_MIPS64
16095 case OPC_ABSQ_S_QH_DSP
:
16102 imm
= (ctx
->opcode
>> 16) & 0xFF;
16103 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16104 temp
= (temp
<< 16) | temp
;
16105 temp
= (temp
<< 32) | temp
;
16106 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16114 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16115 imm
= (int16_t)(imm
<< 6) >> 6;
16116 temp
= ((target_long
)imm
<< 32) \
16117 | ((target_long
)imm
& 0xFFFFFFFF);
16118 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16126 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16127 imm
= (int16_t)(imm
<< 6) >> 6;
16129 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16130 ((uint64_t)(uint16_t)imm
<< 32) |
16131 ((uint64_t)(uint16_t)imm
<< 16) |
16132 (uint64_t)(uint16_t)imm
;
16133 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16138 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16139 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16140 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16141 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16142 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16143 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16144 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16148 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16149 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16150 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16154 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16155 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16156 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16157 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16158 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16165 tcg_temp_free(val_t
);
16168 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16169 uint32_t op1
, uint32_t op2
,
16170 int ret
, int v1
, int v2
, int check_ret
)
16176 if ((ret
== 0) && (check_ret
== 1)) {
16177 /* Treat as NOP. */
16181 t1
= tcg_temp_new();
16182 v1_t
= tcg_temp_new();
16183 v2_t
= tcg_temp_new();
16185 gen_load_gpr(v1_t
, v1
);
16186 gen_load_gpr(v2_t
, v2
);
16189 case OPC_CMPU_EQ_QB_DSP
:
16191 case OPC_CMPU_EQ_QB
:
16193 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16195 case OPC_CMPU_LT_QB
:
16197 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16199 case OPC_CMPU_LE_QB
:
16201 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16203 case OPC_CMPGU_EQ_QB
:
16205 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16207 case OPC_CMPGU_LT_QB
:
16209 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16211 case OPC_CMPGU_LE_QB
:
16213 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16215 case OPC_CMPGDU_EQ_QB
:
16217 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16218 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16219 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16220 tcg_gen_shli_tl(t1
, t1
, 24);
16221 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16223 case OPC_CMPGDU_LT_QB
:
16225 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16226 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16227 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16228 tcg_gen_shli_tl(t1
, t1
, 24);
16229 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16231 case OPC_CMPGDU_LE_QB
:
16233 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16234 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16235 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16236 tcg_gen_shli_tl(t1
, t1
, 24);
16237 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16239 case OPC_CMP_EQ_PH
:
16241 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16243 case OPC_CMP_LT_PH
:
16245 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16247 case OPC_CMP_LE_PH
:
16249 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16253 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16257 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16259 case OPC_PACKRL_PH
:
16261 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16265 #ifdef TARGET_MIPS64
16266 case OPC_CMPU_EQ_OB_DSP
:
16268 case OPC_CMP_EQ_PW
:
16270 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16272 case OPC_CMP_LT_PW
:
16274 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16276 case OPC_CMP_LE_PW
:
16278 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16280 case OPC_CMP_EQ_QH
:
16282 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16284 case OPC_CMP_LT_QH
:
16286 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16288 case OPC_CMP_LE_QH
:
16290 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16292 case OPC_CMPGDU_EQ_OB
:
16294 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16296 case OPC_CMPGDU_LT_OB
:
16298 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16300 case OPC_CMPGDU_LE_OB
:
16302 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16304 case OPC_CMPGU_EQ_OB
:
16306 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16308 case OPC_CMPGU_LT_OB
:
16310 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16312 case OPC_CMPGU_LE_OB
:
16314 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16316 case OPC_CMPU_EQ_OB
:
16318 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16320 case OPC_CMPU_LT_OB
:
16322 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16324 case OPC_CMPU_LE_OB
:
16326 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16328 case OPC_PACKRL_PW
:
16330 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16334 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16338 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16342 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16350 tcg_temp_free(v1_t
);
16351 tcg_temp_free(v2_t
);
16354 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16355 uint32_t op1
, int rt
, int rs
, int sa
)
16362 /* Treat as NOP. */
16366 t0
= tcg_temp_new();
16367 gen_load_gpr(t0
, rs
);
16370 case OPC_APPEND_DSP
:
16371 switch (MASK_APPEND(ctx
->opcode
)) {
16374 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16376 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16380 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16381 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16382 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16383 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16385 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16389 if (sa
!= 0 && sa
!= 2) {
16390 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16391 tcg_gen_ext32u_tl(t0
, t0
);
16392 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16393 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16395 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16397 default: /* Invalid */
16398 MIPS_INVAL("MASK APPEND");
16399 generate_exception_end(ctx
, EXCP_RI
);
16403 #ifdef TARGET_MIPS64
16404 case OPC_DAPPEND_DSP
:
16405 switch (MASK_DAPPEND(ctx
->opcode
)) {
16408 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16412 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16413 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16414 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16418 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16419 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16420 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16425 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16426 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16427 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16428 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16431 default: /* Invalid */
16432 MIPS_INVAL("MASK DAPPEND");
16433 generate_exception_end(ctx
, EXCP_RI
);
16442 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16443 int ret
, int v1
, int v2
, int check_ret
)
16452 if ((ret
== 0) && (check_ret
== 1)) {
16453 /* Treat as NOP. */
16457 t0
= tcg_temp_new();
16458 t1
= tcg_temp_new();
16459 v1_t
= tcg_temp_new();
16460 v2_t
= tcg_temp_new();
16462 gen_load_gpr(v1_t
, v1
);
16463 gen_load_gpr(v2_t
, v2
);
16466 case OPC_EXTR_W_DSP
:
16470 tcg_gen_movi_tl(t0
, v2
);
16471 tcg_gen_movi_tl(t1
, v1
);
16472 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16475 tcg_gen_movi_tl(t0
, v2
);
16476 tcg_gen_movi_tl(t1
, v1
);
16477 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16479 case OPC_EXTR_RS_W
:
16480 tcg_gen_movi_tl(t0
, v2
);
16481 tcg_gen_movi_tl(t1
, v1
);
16482 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16485 tcg_gen_movi_tl(t0
, v2
);
16486 tcg_gen_movi_tl(t1
, v1
);
16487 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16489 case OPC_EXTRV_S_H
:
16490 tcg_gen_movi_tl(t0
, v2
);
16491 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16494 tcg_gen_movi_tl(t0
, v2
);
16495 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16497 case OPC_EXTRV_R_W
:
16498 tcg_gen_movi_tl(t0
, v2
);
16499 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16501 case OPC_EXTRV_RS_W
:
16502 tcg_gen_movi_tl(t0
, v2
);
16503 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16506 tcg_gen_movi_tl(t0
, v2
);
16507 tcg_gen_movi_tl(t1
, v1
);
16508 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16511 tcg_gen_movi_tl(t0
, v2
);
16512 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16515 tcg_gen_movi_tl(t0
, v2
);
16516 tcg_gen_movi_tl(t1
, v1
);
16517 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16520 tcg_gen_movi_tl(t0
, v2
);
16521 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16524 imm
= (ctx
->opcode
>> 20) & 0x3F;
16525 tcg_gen_movi_tl(t0
, ret
);
16526 tcg_gen_movi_tl(t1
, imm
);
16527 gen_helper_shilo(t0
, t1
, cpu_env
);
16530 tcg_gen_movi_tl(t0
, ret
);
16531 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16534 tcg_gen_movi_tl(t0
, ret
);
16535 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16538 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16539 tcg_gen_movi_tl(t0
, imm
);
16540 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16543 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16544 tcg_gen_movi_tl(t0
, imm
);
16545 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16549 #ifdef TARGET_MIPS64
16550 case OPC_DEXTR_W_DSP
:
16554 tcg_gen_movi_tl(t0
, ret
);
16555 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16559 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16560 int ac
= (ctx
->opcode
>> 11) & 0x03;
16561 tcg_gen_movi_tl(t0
, shift
);
16562 tcg_gen_movi_tl(t1
, ac
);
16563 gen_helper_dshilo(t0
, t1
, cpu_env
);
16568 int ac
= (ctx
->opcode
>> 11) & 0x03;
16569 tcg_gen_movi_tl(t0
, ac
);
16570 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16574 tcg_gen_movi_tl(t0
, v2
);
16575 tcg_gen_movi_tl(t1
, v1
);
16577 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16580 tcg_gen_movi_tl(t0
, v2
);
16581 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16584 tcg_gen_movi_tl(t0
, v2
);
16585 tcg_gen_movi_tl(t1
, v1
);
16586 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16589 tcg_gen_movi_tl(t0
, v2
);
16590 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16593 tcg_gen_movi_tl(t0
, v2
);
16594 tcg_gen_movi_tl(t1
, v1
);
16595 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16597 case OPC_DEXTR_R_L
:
16598 tcg_gen_movi_tl(t0
, v2
);
16599 tcg_gen_movi_tl(t1
, v1
);
16600 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16602 case OPC_DEXTR_RS_L
:
16603 tcg_gen_movi_tl(t0
, v2
);
16604 tcg_gen_movi_tl(t1
, v1
);
16605 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16608 tcg_gen_movi_tl(t0
, v2
);
16609 tcg_gen_movi_tl(t1
, v1
);
16610 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16612 case OPC_DEXTR_R_W
:
16613 tcg_gen_movi_tl(t0
, v2
);
16614 tcg_gen_movi_tl(t1
, v1
);
16615 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16617 case OPC_DEXTR_RS_W
:
16618 tcg_gen_movi_tl(t0
, v2
);
16619 tcg_gen_movi_tl(t1
, v1
);
16620 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16622 case OPC_DEXTR_S_H
:
16623 tcg_gen_movi_tl(t0
, v2
);
16624 tcg_gen_movi_tl(t1
, v1
);
16625 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16627 case OPC_DEXTRV_S_H
:
16628 tcg_gen_movi_tl(t0
, v2
);
16629 tcg_gen_movi_tl(t1
, v1
);
16630 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16633 tcg_gen_movi_tl(t0
, v2
);
16634 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16636 case OPC_DEXTRV_R_L
:
16637 tcg_gen_movi_tl(t0
, v2
);
16638 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16640 case OPC_DEXTRV_RS_L
:
16641 tcg_gen_movi_tl(t0
, v2
);
16642 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16645 tcg_gen_movi_tl(t0
, v2
);
16646 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16648 case OPC_DEXTRV_R_W
:
16649 tcg_gen_movi_tl(t0
, v2
);
16650 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16652 case OPC_DEXTRV_RS_W
:
16653 tcg_gen_movi_tl(t0
, v2
);
16654 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16663 tcg_temp_free(v1_t
);
16664 tcg_temp_free(v2_t
);
16667 /* End MIPSDSP functions. */
16669 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16671 int rs
, rt
, rd
, sa
;
16674 rs
= (ctx
->opcode
>> 21) & 0x1f;
16675 rt
= (ctx
->opcode
>> 16) & 0x1f;
16676 rd
= (ctx
->opcode
>> 11) & 0x1f;
16677 sa
= (ctx
->opcode
>> 6) & 0x1f;
16679 op1
= MASK_SPECIAL(ctx
->opcode
);
16682 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16684 case OPC_MULT
... OPC_DIVU
:
16685 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16695 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16698 MIPS_INVAL("special_r6 muldiv");
16699 generate_exception_end(ctx
, EXCP_RI
);
16705 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16709 if (rt
== 0 && sa
== 1) {
16710 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16711 We need additionally to check other fields */
16712 gen_cl(ctx
, op1
, rd
, rs
);
16714 generate_exception_end(ctx
, EXCP_RI
);
16718 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16719 gen_helper_do_semihosting(cpu_env
);
16721 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16722 generate_exception_end(ctx
, EXCP_RI
);
16724 generate_exception_end(ctx
, EXCP_DBp
);
16728 #if defined(TARGET_MIPS64)
16730 check_mips_64(ctx
);
16731 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16735 if (rt
== 0 && sa
== 1) {
16736 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16737 We need additionally to check other fields */
16738 check_mips_64(ctx
);
16739 gen_cl(ctx
, op1
, rd
, rs
);
16741 generate_exception_end(ctx
, EXCP_RI
);
16744 case OPC_DMULT
... OPC_DDIVU
:
16745 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16755 check_mips_64(ctx
);
16756 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16759 MIPS_INVAL("special_r6 muldiv");
16760 generate_exception_end(ctx
, EXCP_RI
);
16765 default: /* Invalid */
16766 MIPS_INVAL("special_r6");
16767 generate_exception_end(ctx
, EXCP_RI
);
16772 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16774 int rs
, rt
, rd
, sa
;
16777 rs
= (ctx
->opcode
>> 21) & 0x1f;
16778 rt
= (ctx
->opcode
>> 16) & 0x1f;
16779 rd
= (ctx
->opcode
>> 11) & 0x1f;
16780 sa
= (ctx
->opcode
>> 6) & 0x1f;
16782 op1
= MASK_SPECIAL(ctx
->opcode
);
16784 case OPC_MOVN
: /* Conditional move */
16786 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16787 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16788 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16790 case OPC_MFHI
: /* Move from HI/LO */
16792 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16795 case OPC_MTLO
: /* Move to HI/LO */
16796 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16799 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16800 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16801 check_cp1_enabled(ctx
);
16802 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16803 (ctx
->opcode
>> 16) & 1);
16805 generate_exception_err(ctx
, EXCP_CpU
, 1);
16811 check_insn(ctx
, INSN_VR54XX
);
16812 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16813 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16815 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16820 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16822 #if defined(TARGET_MIPS64)
16823 case OPC_DMULT
... OPC_DDIVU
:
16824 check_insn(ctx
, ISA_MIPS3
);
16825 check_mips_64(ctx
);
16826 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16830 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16833 #ifdef MIPS_STRICT_STANDARD
16834 MIPS_INVAL("SPIM");
16835 generate_exception_end(ctx
, EXCP_RI
);
16837 /* Implemented as RI exception for now. */
16838 MIPS_INVAL("spim (unofficial)");
16839 generate_exception_end(ctx
, EXCP_RI
);
16842 default: /* Invalid */
16843 MIPS_INVAL("special_legacy");
16844 generate_exception_end(ctx
, EXCP_RI
);
16849 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16851 int rs
, rt
, rd
, sa
;
16854 rs
= (ctx
->opcode
>> 21) & 0x1f;
16855 rt
= (ctx
->opcode
>> 16) & 0x1f;
16856 rd
= (ctx
->opcode
>> 11) & 0x1f;
16857 sa
= (ctx
->opcode
>> 6) & 0x1f;
16859 op1
= MASK_SPECIAL(ctx
->opcode
);
16861 case OPC_SLL
: /* Shift with immediate */
16862 if (sa
== 5 && rd
== 0 &&
16863 rs
== 0 && rt
== 0) { /* PAUSE */
16864 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16865 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16866 generate_exception_end(ctx
, EXCP_RI
);
16872 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16875 switch ((ctx
->opcode
>> 21) & 0x1f) {
16877 /* rotr is decoded as srl on non-R2 CPUs */
16878 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16883 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16886 generate_exception_end(ctx
, EXCP_RI
);
16890 case OPC_ADD
... OPC_SUBU
:
16891 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16893 case OPC_SLLV
: /* Shifts */
16895 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16898 switch ((ctx
->opcode
>> 6) & 0x1f) {
16900 /* rotrv is decoded as srlv on non-R2 CPUs */
16901 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16906 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16909 generate_exception_end(ctx
, EXCP_RI
);
16913 case OPC_SLT
: /* Set on less than */
16915 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16917 case OPC_AND
: /* Logic*/
16921 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16924 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16926 case OPC_TGE
... OPC_TEQ
: /* Traps */
16928 check_insn(ctx
, ISA_MIPS2
);
16929 gen_trap(ctx
, op1
, rs
, rt
, -1);
16931 case OPC_LSA
: /* OPC_PMON */
16932 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16933 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16934 decode_opc_special_r6(env
, ctx
);
16936 /* Pmon entry point, also R4010 selsl */
16937 #ifdef MIPS_STRICT_STANDARD
16938 MIPS_INVAL("PMON / selsl");
16939 generate_exception_end(ctx
, EXCP_RI
);
16941 gen_helper_0e0i(pmon
, sa
);
16946 generate_exception_end(ctx
, EXCP_SYSCALL
);
16949 generate_exception_end(ctx
, EXCP_BREAK
);
16952 check_insn(ctx
, ISA_MIPS2
);
16953 /* Treat as NOP. */
16956 #if defined(TARGET_MIPS64)
16957 /* MIPS64 specific opcodes */
16962 check_insn(ctx
, ISA_MIPS3
);
16963 check_mips_64(ctx
);
16964 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16967 switch ((ctx
->opcode
>> 21) & 0x1f) {
16969 /* drotr is decoded as dsrl on non-R2 CPUs */
16970 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16975 check_insn(ctx
, ISA_MIPS3
);
16976 check_mips_64(ctx
);
16977 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16980 generate_exception_end(ctx
, EXCP_RI
);
16985 switch ((ctx
->opcode
>> 21) & 0x1f) {
16987 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
16988 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16993 check_insn(ctx
, ISA_MIPS3
);
16994 check_mips_64(ctx
);
16995 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16998 generate_exception_end(ctx
, EXCP_RI
);
17002 case OPC_DADD
... OPC_DSUBU
:
17003 check_insn(ctx
, ISA_MIPS3
);
17004 check_mips_64(ctx
);
17005 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17009 check_insn(ctx
, ISA_MIPS3
);
17010 check_mips_64(ctx
);
17011 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17014 switch ((ctx
->opcode
>> 6) & 0x1f) {
17016 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17017 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17022 check_insn(ctx
, ISA_MIPS3
);
17023 check_mips_64(ctx
);
17024 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17027 generate_exception_end(ctx
, EXCP_RI
);
17032 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17033 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17034 decode_opc_special_r6(env
, ctx
);
17039 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17040 decode_opc_special_r6(env
, ctx
);
17042 decode_opc_special_legacy(env
, ctx
);
17047 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17052 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17054 rs
= (ctx
->opcode
>> 21) & 0x1f;
17055 rt
= (ctx
->opcode
>> 16) & 0x1f;
17056 rd
= (ctx
->opcode
>> 11) & 0x1f;
17058 op1
= MASK_SPECIAL2(ctx
->opcode
);
17060 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17061 case OPC_MSUB
... OPC_MSUBU
:
17062 check_insn(ctx
, ISA_MIPS32
);
17063 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17066 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17069 case OPC_DIVU_G_2F
:
17070 case OPC_MULT_G_2F
:
17071 case OPC_MULTU_G_2F
:
17073 case OPC_MODU_G_2F
:
17074 check_insn(ctx
, INSN_LOONGSON2F
);
17075 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17079 check_insn(ctx
, ISA_MIPS32
);
17080 gen_cl(ctx
, op1
, rd
, rs
);
17083 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17084 gen_helper_do_semihosting(cpu_env
);
17086 /* XXX: not clear which exception should be raised
17087 * when in debug mode...
17089 check_insn(ctx
, ISA_MIPS32
);
17090 generate_exception_end(ctx
, EXCP_DBp
);
17093 #if defined(TARGET_MIPS64)
17096 check_insn(ctx
, ISA_MIPS64
);
17097 check_mips_64(ctx
);
17098 gen_cl(ctx
, op1
, rd
, rs
);
17100 case OPC_DMULT_G_2F
:
17101 case OPC_DMULTU_G_2F
:
17102 case OPC_DDIV_G_2F
:
17103 case OPC_DDIVU_G_2F
:
17104 case OPC_DMOD_G_2F
:
17105 case OPC_DMODU_G_2F
:
17106 check_insn(ctx
, INSN_LOONGSON2F
);
17107 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17110 default: /* Invalid */
17111 MIPS_INVAL("special2_legacy");
17112 generate_exception_end(ctx
, EXCP_RI
);
17117 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17119 int rs
, rt
, rd
, sa
;
17123 rs
= (ctx
->opcode
>> 21) & 0x1f;
17124 rt
= (ctx
->opcode
>> 16) & 0x1f;
17125 rd
= (ctx
->opcode
>> 11) & 0x1f;
17126 sa
= (ctx
->opcode
>> 6) & 0x1f;
17127 imm
= (int16_t)ctx
->opcode
>> 7;
17129 op1
= MASK_SPECIAL3(ctx
->opcode
);
17133 /* hint codes 24-31 are reserved and signal RI */
17134 generate_exception_end(ctx
, EXCP_RI
);
17136 /* Treat as NOP. */
17139 /* Treat as NOP. */
17142 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17145 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17150 /* Treat as NOP. */
17153 op2
= MASK_BSHFL(ctx
->opcode
);
17155 case OPC_ALIGN
... OPC_ALIGN_END
:
17156 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17159 gen_bitswap(ctx
, op2
, rd
, rt
);
17164 #if defined(TARGET_MIPS64)
17166 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17169 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17172 check_mips_64(ctx
);
17175 /* Treat as NOP. */
17178 op2
= MASK_DBSHFL(ctx
->opcode
);
17180 case OPC_DALIGN
... OPC_DALIGN_END
:
17181 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17184 gen_bitswap(ctx
, op2
, rd
, rt
);
17191 default: /* Invalid */
17192 MIPS_INVAL("special3_r6");
17193 generate_exception_end(ctx
, EXCP_RI
);
17198 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17203 rs
= (ctx
->opcode
>> 21) & 0x1f;
17204 rt
= (ctx
->opcode
>> 16) & 0x1f;
17205 rd
= (ctx
->opcode
>> 11) & 0x1f;
17207 op1
= MASK_SPECIAL3(ctx
->opcode
);
17209 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17210 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17211 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17212 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17213 * the same mask and op1. */
17214 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17215 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17218 case OPC_ADDUH_R_QB
:
17220 case OPC_ADDQH_R_PH
:
17222 case OPC_ADDQH_R_W
:
17224 case OPC_SUBUH_R_QB
:
17226 case OPC_SUBQH_R_PH
:
17228 case OPC_SUBQH_R_W
:
17229 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17234 case OPC_MULQ_RS_W
:
17235 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17238 MIPS_INVAL("MASK ADDUH.QB");
17239 generate_exception_end(ctx
, EXCP_RI
);
17242 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17243 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17245 generate_exception_end(ctx
, EXCP_RI
);
17249 op2
= MASK_LX(ctx
->opcode
);
17251 #if defined(TARGET_MIPS64)
17257 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17259 default: /* Invalid */
17260 MIPS_INVAL("MASK LX");
17261 generate_exception_end(ctx
, EXCP_RI
);
17265 case OPC_ABSQ_S_PH_DSP
:
17266 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17268 case OPC_ABSQ_S_QB
:
17269 case OPC_ABSQ_S_PH
:
17271 case OPC_PRECEQ_W_PHL
:
17272 case OPC_PRECEQ_W_PHR
:
17273 case OPC_PRECEQU_PH_QBL
:
17274 case OPC_PRECEQU_PH_QBR
:
17275 case OPC_PRECEQU_PH_QBLA
:
17276 case OPC_PRECEQU_PH_QBRA
:
17277 case OPC_PRECEU_PH_QBL
:
17278 case OPC_PRECEU_PH_QBR
:
17279 case OPC_PRECEU_PH_QBLA
:
17280 case OPC_PRECEU_PH_QBRA
:
17281 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17288 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17291 MIPS_INVAL("MASK ABSQ_S.PH");
17292 generate_exception_end(ctx
, EXCP_RI
);
17296 case OPC_ADDU_QB_DSP
:
17297 op2
= MASK_ADDU_QB(ctx
->opcode
);
17300 case OPC_ADDQ_S_PH
:
17303 case OPC_ADDU_S_QB
:
17305 case OPC_ADDU_S_PH
:
17307 case OPC_SUBQ_S_PH
:
17310 case OPC_SUBU_S_QB
:
17312 case OPC_SUBU_S_PH
:
17316 case OPC_RADDU_W_QB
:
17317 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17319 case OPC_MULEU_S_PH_QBL
:
17320 case OPC_MULEU_S_PH_QBR
:
17321 case OPC_MULQ_RS_PH
:
17322 case OPC_MULEQ_S_W_PHL
:
17323 case OPC_MULEQ_S_W_PHR
:
17324 case OPC_MULQ_S_PH
:
17325 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17327 default: /* Invalid */
17328 MIPS_INVAL("MASK ADDU.QB");
17329 generate_exception_end(ctx
, EXCP_RI
);
17334 case OPC_CMPU_EQ_QB_DSP
:
17335 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17337 case OPC_PRECR_SRA_PH_W
:
17338 case OPC_PRECR_SRA_R_PH_W
:
17339 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17341 case OPC_PRECR_QB_PH
:
17342 case OPC_PRECRQ_QB_PH
:
17343 case OPC_PRECRQ_PH_W
:
17344 case OPC_PRECRQ_RS_PH_W
:
17345 case OPC_PRECRQU_S_QB_PH
:
17346 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17348 case OPC_CMPU_EQ_QB
:
17349 case OPC_CMPU_LT_QB
:
17350 case OPC_CMPU_LE_QB
:
17351 case OPC_CMP_EQ_PH
:
17352 case OPC_CMP_LT_PH
:
17353 case OPC_CMP_LE_PH
:
17354 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17356 case OPC_CMPGU_EQ_QB
:
17357 case OPC_CMPGU_LT_QB
:
17358 case OPC_CMPGU_LE_QB
:
17359 case OPC_CMPGDU_EQ_QB
:
17360 case OPC_CMPGDU_LT_QB
:
17361 case OPC_CMPGDU_LE_QB
:
17364 case OPC_PACKRL_PH
:
17365 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17367 default: /* Invalid */
17368 MIPS_INVAL("MASK CMPU.EQ.QB");
17369 generate_exception_end(ctx
, EXCP_RI
);
17373 case OPC_SHLL_QB_DSP
:
17374 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17376 case OPC_DPA_W_PH_DSP
:
17377 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17379 case OPC_DPAU_H_QBL
:
17380 case OPC_DPAU_H_QBR
:
17381 case OPC_DPSU_H_QBL
:
17382 case OPC_DPSU_H_QBR
:
17384 case OPC_DPAX_W_PH
:
17385 case OPC_DPAQ_S_W_PH
:
17386 case OPC_DPAQX_S_W_PH
:
17387 case OPC_DPAQX_SA_W_PH
:
17389 case OPC_DPSX_W_PH
:
17390 case OPC_DPSQ_S_W_PH
:
17391 case OPC_DPSQX_S_W_PH
:
17392 case OPC_DPSQX_SA_W_PH
:
17393 case OPC_MULSAQ_S_W_PH
:
17394 case OPC_DPAQ_SA_L_W
:
17395 case OPC_DPSQ_SA_L_W
:
17396 case OPC_MAQ_S_W_PHL
:
17397 case OPC_MAQ_S_W_PHR
:
17398 case OPC_MAQ_SA_W_PHL
:
17399 case OPC_MAQ_SA_W_PHR
:
17400 case OPC_MULSA_W_PH
:
17401 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17403 default: /* Invalid */
17404 MIPS_INVAL("MASK DPAW.PH");
17405 generate_exception_end(ctx
, EXCP_RI
);
17410 op2
= MASK_INSV(ctx
->opcode
);
17421 t0
= tcg_temp_new();
17422 t1
= tcg_temp_new();
17424 gen_load_gpr(t0
, rt
);
17425 gen_load_gpr(t1
, rs
);
17427 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17433 default: /* Invalid */
17434 MIPS_INVAL("MASK INSV");
17435 generate_exception_end(ctx
, EXCP_RI
);
17439 case OPC_APPEND_DSP
:
17440 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17442 case OPC_EXTR_W_DSP
:
17443 op2
= MASK_EXTR_W(ctx
->opcode
);
17447 case OPC_EXTR_RS_W
:
17449 case OPC_EXTRV_S_H
:
17451 case OPC_EXTRV_R_W
:
17452 case OPC_EXTRV_RS_W
:
17457 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17460 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17466 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17468 default: /* Invalid */
17469 MIPS_INVAL("MASK EXTR.W");
17470 generate_exception_end(ctx
, EXCP_RI
);
17474 #if defined(TARGET_MIPS64)
17475 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17476 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17477 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17478 check_insn(ctx
, INSN_LOONGSON2E
);
17479 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17481 case OPC_ABSQ_S_QH_DSP
:
17482 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17484 case OPC_PRECEQ_L_PWL
:
17485 case OPC_PRECEQ_L_PWR
:
17486 case OPC_PRECEQ_PW_QHL
:
17487 case OPC_PRECEQ_PW_QHR
:
17488 case OPC_PRECEQ_PW_QHLA
:
17489 case OPC_PRECEQ_PW_QHRA
:
17490 case OPC_PRECEQU_QH_OBL
:
17491 case OPC_PRECEQU_QH_OBR
:
17492 case OPC_PRECEQU_QH_OBLA
:
17493 case OPC_PRECEQU_QH_OBRA
:
17494 case OPC_PRECEU_QH_OBL
:
17495 case OPC_PRECEU_QH_OBR
:
17496 case OPC_PRECEU_QH_OBLA
:
17497 case OPC_PRECEU_QH_OBRA
:
17498 case OPC_ABSQ_S_OB
:
17499 case OPC_ABSQ_S_PW
:
17500 case OPC_ABSQ_S_QH
:
17501 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17509 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17511 default: /* Invalid */
17512 MIPS_INVAL("MASK ABSQ_S.QH");
17513 generate_exception_end(ctx
, EXCP_RI
);
17517 case OPC_ADDU_OB_DSP
:
17518 op2
= MASK_ADDU_OB(ctx
->opcode
);
17520 case OPC_RADDU_L_OB
:
17522 case OPC_SUBQ_S_PW
:
17524 case OPC_SUBQ_S_QH
:
17526 case OPC_SUBU_S_OB
:
17528 case OPC_SUBU_S_QH
:
17530 case OPC_SUBUH_R_OB
:
17532 case OPC_ADDQ_S_PW
:
17534 case OPC_ADDQ_S_QH
:
17536 case OPC_ADDU_S_OB
:
17538 case OPC_ADDU_S_QH
:
17540 case OPC_ADDUH_R_OB
:
17541 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17543 case OPC_MULEQ_S_PW_QHL
:
17544 case OPC_MULEQ_S_PW_QHR
:
17545 case OPC_MULEU_S_QH_OBL
:
17546 case OPC_MULEU_S_QH_OBR
:
17547 case OPC_MULQ_RS_QH
:
17548 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17550 default: /* Invalid */
17551 MIPS_INVAL("MASK ADDU.OB");
17552 generate_exception_end(ctx
, EXCP_RI
);
17556 case OPC_CMPU_EQ_OB_DSP
:
17557 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17559 case OPC_PRECR_SRA_QH_PW
:
17560 case OPC_PRECR_SRA_R_QH_PW
:
17561 /* Return value is rt. */
17562 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17564 case OPC_PRECR_OB_QH
:
17565 case OPC_PRECRQ_OB_QH
:
17566 case OPC_PRECRQ_PW_L
:
17567 case OPC_PRECRQ_QH_PW
:
17568 case OPC_PRECRQ_RS_QH_PW
:
17569 case OPC_PRECRQU_S_OB_QH
:
17570 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17572 case OPC_CMPU_EQ_OB
:
17573 case OPC_CMPU_LT_OB
:
17574 case OPC_CMPU_LE_OB
:
17575 case OPC_CMP_EQ_QH
:
17576 case OPC_CMP_LT_QH
:
17577 case OPC_CMP_LE_QH
:
17578 case OPC_CMP_EQ_PW
:
17579 case OPC_CMP_LT_PW
:
17580 case OPC_CMP_LE_PW
:
17581 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17583 case OPC_CMPGDU_EQ_OB
:
17584 case OPC_CMPGDU_LT_OB
:
17585 case OPC_CMPGDU_LE_OB
:
17586 case OPC_CMPGU_EQ_OB
:
17587 case OPC_CMPGU_LT_OB
:
17588 case OPC_CMPGU_LE_OB
:
17589 case OPC_PACKRL_PW
:
17593 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17595 default: /* Invalid */
17596 MIPS_INVAL("MASK CMPU_EQ.OB");
17597 generate_exception_end(ctx
, EXCP_RI
);
17601 case OPC_DAPPEND_DSP
:
17602 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17604 case OPC_DEXTR_W_DSP
:
17605 op2
= MASK_DEXTR_W(ctx
->opcode
);
17612 case OPC_DEXTR_R_L
:
17613 case OPC_DEXTR_RS_L
:
17615 case OPC_DEXTR_R_W
:
17616 case OPC_DEXTR_RS_W
:
17617 case OPC_DEXTR_S_H
:
17619 case OPC_DEXTRV_R_L
:
17620 case OPC_DEXTRV_RS_L
:
17621 case OPC_DEXTRV_S_H
:
17623 case OPC_DEXTRV_R_W
:
17624 case OPC_DEXTRV_RS_W
:
17625 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17630 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17632 default: /* Invalid */
17633 MIPS_INVAL("MASK EXTR.W");
17634 generate_exception_end(ctx
, EXCP_RI
);
17638 case OPC_DPAQ_W_QH_DSP
:
17639 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17641 case OPC_DPAU_H_OBL
:
17642 case OPC_DPAU_H_OBR
:
17643 case OPC_DPSU_H_OBL
:
17644 case OPC_DPSU_H_OBR
:
17646 case OPC_DPAQ_S_W_QH
:
17648 case OPC_DPSQ_S_W_QH
:
17649 case OPC_MULSAQ_S_W_QH
:
17650 case OPC_DPAQ_SA_L_PW
:
17651 case OPC_DPSQ_SA_L_PW
:
17652 case OPC_MULSAQ_S_L_PW
:
17653 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17655 case OPC_MAQ_S_W_QHLL
:
17656 case OPC_MAQ_S_W_QHLR
:
17657 case OPC_MAQ_S_W_QHRL
:
17658 case OPC_MAQ_S_W_QHRR
:
17659 case OPC_MAQ_SA_W_QHLL
:
17660 case OPC_MAQ_SA_W_QHLR
:
17661 case OPC_MAQ_SA_W_QHRL
:
17662 case OPC_MAQ_SA_W_QHRR
:
17663 case OPC_MAQ_S_L_PWL
:
17664 case OPC_MAQ_S_L_PWR
:
17669 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17671 default: /* Invalid */
17672 MIPS_INVAL("MASK DPAQ.W.QH");
17673 generate_exception_end(ctx
, EXCP_RI
);
17677 case OPC_DINSV_DSP
:
17678 op2
= MASK_INSV(ctx
->opcode
);
17689 t0
= tcg_temp_new();
17690 t1
= tcg_temp_new();
17692 gen_load_gpr(t0
, rt
);
17693 gen_load_gpr(t1
, rs
);
17695 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17701 default: /* Invalid */
17702 MIPS_INVAL("MASK DINSV");
17703 generate_exception_end(ctx
, EXCP_RI
);
17707 case OPC_SHLL_OB_DSP
:
17708 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17711 default: /* Invalid */
17712 MIPS_INVAL("special3_legacy");
17713 generate_exception_end(ctx
, EXCP_RI
);
17718 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17720 int rs
, rt
, rd
, sa
;
17723 rs
= (ctx
->opcode
>> 21) & 0x1f;
17724 rt
= (ctx
->opcode
>> 16) & 0x1f;
17725 rd
= (ctx
->opcode
>> 11) & 0x1f;
17726 sa
= (ctx
->opcode
>> 6) & 0x1f;
17728 op1
= MASK_SPECIAL3(ctx
->opcode
);
17732 check_insn(ctx
, ISA_MIPS32R2
);
17733 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17736 op2
= MASK_BSHFL(ctx
->opcode
);
17738 case OPC_ALIGN
... OPC_ALIGN_END
:
17740 check_insn(ctx
, ISA_MIPS32R6
);
17741 decode_opc_special3_r6(env
, ctx
);
17744 check_insn(ctx
, ISA_MIPS32R2
);
17745 gen_bshfl(ctx
, op2
, rt
, rd
);
17749 #if defined(TARGET_MIPS64)
17750 case OPC_DEXTM
... OPC_DEXT
:
17751 case OPC_DINSM
... OPC_DINS
:
17752 check_insn(ctx
, ISA_MIPS64R2
);
17753 check_mips_64(ctx
);
17754 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17757 op2
= MASK_DBSHFL(ctx
->opcode
);
17759 case OPC_DALIGN
... OPC_DALIGN_END
:
17761 check_insn(ctx
, ISA_MIPS32R6
);
17762 decode_opc_special3_r6(env
, ctx
);
17765 check_insn(ctx
, ISA_MIPS64R2
);
17766 check_mips_64(ctx
);
17767 op2
= MASK_DBSHFL(ctx
->opcode
);
17768 gen_bshfl(ctx
, op2
, rt
, rd
);
17774 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17777 check_insn(ctx
, ASE_MT
);
17779 TCGv t0
= tcg_temp_new();
17780 TCGv t1
= tcg_temp_new();
17782 gen_load_gpr(t0
, rt
);
17783 gen_load_gpr(t1
, rs
);
17784 gen_helper_fork(t0
, t1
);
17790 check_insn(ctx
, ASE_MT
);
17792 TCGv t0
= tcg_temp_new();
17794 gen_load_gpr(t0
, rs
);
17795 gen_helper_yield(t0
, cpu_env
, t0
);
17796 gen_store_gpr(t0
, rd
);
17801 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17802 decode_opc_special3_r6(env
, ctx
);
17804 decode_opc_special3_legacy(env
, ctx
);
17809 /* MIPS SIMD Architecture (MSA) */
/* Validate that MSA (MIPS SIMD Architecture) instructions may execute in
 * the current translation state.  Raises RI when the FPU is enabled but
 * not in 64-bit-FPR mode, MSADIS when the CPU has MSA but it is disabled,
 * and RI when MSA is absent entirely.
 * NOTE(review): this text is a partially elided extraction — the original
 * return statements between the checks are not visible here. */
17810 static inline int check_msa_access(DisasContext
*ctx
)
/* MSA with the FPU active requires the 64-bit FPR mode (F64) */
17812 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
17813 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
17814 generate_exception_end(ctx
, EXCP_RI
);
/* MSA not currently usable: distinguish "present but disabled" from
 * "not implemented at all" */
17818 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
17819 if (ctx
->insn_flags
& ASE_MSA
) {
/* MSA exists on this CPU but is disabled -> MSA-disabled exception */
17820 generate_exception_end(ctx
, EXCP_MSADIS
);
/* CPU has no MSA ASE -> reserved instruction */
17823 generate_exception_end(ctx
, EXCP_RI
);
/* Emit TCG ops that set 'tresult' to 1 iff any element of MSA vector
 * register 'wt' (element width selected by 'df') is zero.
 * Uses the classic "has-zero-byte" bit trick per 64-bit half:
 *   (x - ones) & ~x & highbits  is non-zero iff some element of x is 0,
 * where 'ones' has a 1 in each element's LSB and 'highbits' a 1 in each
 * element's MSB.  (NOTE: source is a partially elided extraction; the
 * switch (df) scaffolding around the constant assignments is missing.) */
17830 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
17832 /* generates tcg ops to check if any element is 0 */
17833 /* Note this function only works with MSA_WRLEN = 128 */
17834 uint64_t eval_zero_or_big
= 0;
17835 uint64_t eval_big
= 0;
17836 TCGv_i64 t0
= tcg_temp_new_i64();
17837 TCGv_i64 t1
= tcg_temp_new_i64();
/* byte elements: LSB / MSB mask per byte */
17840 eval_zero_or_big
= 0x0101010101010101ULL
;
17841 eval_big
= 0x8080808080808080ULL
;
/* halfword elements */
17844 eval_zero_or_big
= 0x0001000100010001ULL
;
17845 eval_big
= 0x8000800080008000ULL
;
/* word elements */
17848 eval_zero_or_big
= 0x0000000100000001ULL
;
17849 eval_big
= 0x8000000080000000ULL
;
/* doubleword elements */
17852 eval_zero_or_big
= 0x0000000000000001ULL
;
17853 eval_big
= 0x8000000000000000ULL
;
/* low 64 bits of the vector register (msa_wr_d[wt<<1]) */
17856 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
17857 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
17858 tcg_gen_andi_i64(t0
, t0
, eval_big
);
/* high 64 bits of the vector register (msa_wr_d[(wt<<1)+1]) */
17859 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
17860 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
17861 tcg_gen_andi_i64(t1
, t1
, eval_big
);
/* combine both halves: any set bit means some element is zero */
17862 tcg_gen_or_i64(t0
, t0
, t1
);
17863 /* if all bits are zero then all elements are not zero */
17864 /* if some bit is non-zero then some element is zero */
17865 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
17866 tcg_gen_trunc_i64_tl(tresult
, t0
);
17867 tcg_temp_free_i64(t0
);
17868 tcg_temp_free_i64(t1
);
/* Translate the MSA branch instructions (BZ.V/BNZ.V and the per-element
 * BZ.df/BNZ.df forms, selected by op1): compute the branch condition into
 * 'bcond' and set up the delayed-branch state in ctx.
 * (NOTE: source is a partially elided extraction; the switch (op1)
 * scaffolding and break statements are missing from view.) */
17871 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
/* df: element size field; wt: vector source register; s16: signed
 * 16-bit branch offset taken from the low opcode bits */
17873 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17874 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17875 int64_t s16
= (int16_t)ctx
->opcode
;
17877 check_msa_access(ctx
);
/* R6 forbids a branch in a delay/forbidden slot -> RI */
17879 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17880 generate_exception_end(ctx
, EXCP_RI
);
/* whole-vector test: OR both 64-bit halves; BZ.V branches when the
 * whole register is zero (EQ), BNZ.V when non-zero (NE) */
17887 TCGv_i64 t0
= tcg_temp_new_i64();
17888 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17889 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17890 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17891 tcg_gen_trunc_i64_tl(bcond
, t0
);
17892 tcg_temp_free_i64(t0
);
/* per-element form: bcond = "some element of wt is zero" */
17899 gen_check_zero_element(bcond
, df
, wt
);
/* inverted per-element form: invert the element-zero test */
17905 gen_check_zero_element(bcond
, df
, wt
);
17906 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
/* branch target = PC + (offset << 2) + 4, 32-bit delay slot follows */
17910 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
17912 ctx
->hflags
|= MIPS_HFLAG_BC
;
17913 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
/* Translate the MSA I8 instruction format: bitwise-immediate ops on byte
 * vectors (ANDI.B, ORI.B, NORI.B, XORI.B, BMNZI.B, BMZI.B, BSELI.B) and
 * SHF.df, all taking an 8-bit immediate.  Dispatch is on the masked
 * opcode; each case calls the matching MSA helper.
 * (NOTE: source is a partially elided extraction; the case labels and
 * break statements between helper calls are missing from view.) */
17916 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
17918 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
/* i8: immediate; ws: source vector; wd: destination vector */
17919 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
17920 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17921 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17923 TCGv_i32 twd
= tcg_const_i32(wd
);
17924 TCGv_i32 tws
= tcg_const_i32(ws
);
17925 TCGv_i32 ti8
= tcg_const_i32(i8
);
17927 switch (MASK_MSA_I8(ctx
->opcode
)) {
17929 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
17932 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
17935 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
17938 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
17941 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
17944 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
17947 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
/* SHF.df: data format comes from opcode bits 24-25; the doubleword
 * format is not defined for SHF -> RI */
17953 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
17954 if (df
== DF_DOUBLE
) {
17955 generate_exception_end(ctx
, EXCP_RI
);
17957 TCGv_i32 tdf
= tcg_const_i32(df
);
17958 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
17959 tcg_temp_free_i32(tdf
);
/* unrecognized I8-format opcode -> reserved instruction */
17964 MIPS_INVAL("MSA instruction");
17965 generate_exception_end(ctx
, EXCP_RI
);
/* release the constant temporaries on every path that falls through */
17969 tcg_temp_free_i32(twd
);
17970 tcg_temp_free_i32(tws
);
17971 tcg_temp_free_i32(ti8
);
17974 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17976 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17977 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17978 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17979 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17980 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17981 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17983 TCGv_i32 tdf
= tcg_const_i32(df
);
17984 TCGv_i32 twd
= tcg_const_i32(wd
);
17985 TCGv_i32 tws
= tcg_const_i32(ws
);
17986 TCGv_i32 timm
= tcg_temp_new_i32();
17987 tcg_gen_movi_i32(timm
, u5
);
17989 switch (MASK_MSA_I5(ctx
->opcode
)) {
17991 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17994 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17996 case OPC_MAXI_S_df
:
17997 tcg_gen_movi_i32(timm
, s5
);
17998 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18000 case OPC_MAXI_U_df
:
18001 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18003 case OPC_MINI_S_df
:
18004 tcg_gen_movi_i32(timm
, s5
);
18005 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18007 case OPC_MINI_U_df
:
18008 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18011 tcg_gen_movi_i32(timm
, s5
);
18012 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18014 case OPC_CLTI_S_df
:
18015 tcg_gen_movi_i32(timm
, s5
);
18016 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18018 case OPC_CLTI_U_df
:
18019 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18021 case OPC_CLEI_S_df
:
18022 tcg_gen_movi_i32(timm
, s5
);
18023 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18025 case OPC_CLEI_U_df
:
18026 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18030 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18031 tcg_gen_movi_i32(timm
, s10
);
18032 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18036 MIPS_INVAL("MSA instruction");
18037 generate_exception_end(ctx
, EXCP_RI
);
18041 tcg_temp_free_i32(tdf
);
18042 tcg_temp_free_i32(twd
);
18043 tcg_temp_free_i32(tws
);
18044 tcg_temp_free_i32(timm
);
18047 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18049 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18050 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18051 uint32_t df
= 0, m
= 0;
18052 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18053 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18060 if ((dfm
& 0x40) == 0x00) {
18063 } else if ((dfm
& 0x60) == 0x40) {
18066 } else if ((dfm
& 0x70) == 0x60) {
18069 } else if ((dfm
& 0x78) == 0x70) {
18073 generate_exception_end(ctx
, EXCP_RI
);
18077 tdf
= tcg_const_i32(df
);
18078 tm
= tcg_const_i32(m
);
18079 twd
= tcg_const_i32(wd
);
18080 tws
= tcg_const_i32(ws
);
18082 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18084 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18087 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18090 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18093 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18096 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18099 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18101 case OPC_BINSLI_df
:
18102 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18104 case OPC_BINSRI_df
:
18105 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18108 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18111 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18114 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18117 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18120 MIPS_INVAL("MSA instruction");
18121 generate_exception_end(ctx
, EXCP_RI
);
18125 tcg_temp_free_i32(tdf
);
18126 tcg_temp_free_i32(tm
);
18127 tcg_temp_free_i32(twd
);
18128 tcg_temp_free_i32(tws
);
18131 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18133 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18134 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18135 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18136 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18137 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18139 TCGv_i32 tdf
= tcg_const_i32(df
);
18140 TCGv_i32 twd
= tcg_const_i32(wd
);
18141 TCGv_i32 tws
= tcg_const_i32(ws
);
18142 TCGv_i32 twt
= tcg_const_i32(wt
);
18144 switch (MASK_MSA_3R(ctx
->opcode
)) {
18146 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18149 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18152 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18155 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18157 case OPC_SUBS_S_df
:
18158 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18161 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18164 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18167 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18170 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18173 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18175 case OPC_ADDS_A_df
:
18176 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18178 case OPC_SUBS_U_df
:
18179 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18182 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18185 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18188 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18191 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18194 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18197 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18199 case OPC_ADDS_S_df
:
18200 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18202 case OPC_SUBSUS_U_df
:
18203 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18206 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18209 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18212 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18215 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18218 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18221 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18223 case OPC_ADDS_U_df
:
18224 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18226 case OPC_SUBSUU_S_df
:
18227 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18230 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18233 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18236 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18239 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18242 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18244 case OPC_ASUB_S_df
:
18245 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18248 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18251 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18254 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18257 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18260 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18263 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18265 case OPC_ASUB_U_df
:
18266 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18269 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18272 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18275 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18278 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18280 case OPC_AVER_S_df
:
18281 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18284 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18287 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18290 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18293 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18295 case OPC_AVER_U_df
:
18296 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18299 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18302 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18305 case OPC_DOTP_S_df
:
18306 case OPC_DOTP_U_df
:
18307 case OPC_DPADD_S_df
:
18308 case OPC_DPADD_U_df
:
18309 case OPC_DPSUB_S_df
:
18310 case OPC_HADD_S_df
:
18311 case OPC_DPSUB_U_df
:
18312 case OPC_HADD_U_df
:
18313 case OPC_HSUB_S_df
:
18314 case OPC_HSUB_U_df
:
18315 if (df
== DF_BYTE
) {
18316 generate_exception_end(ctx
, EXCP_RI
);
18319 switch (MASK_MSA_3R(ctx
->opcode
)) {
18320 case OPC_DOTP_S_df
:
18321 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18323 case OPC_DOTP_U_df
:
18324 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18326 case OPC_DPADD_S_df
:
18327 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18329 case OPC_DPADD_U_df
:
18330 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18332 case OPC_DPSUB_S_df
:
18333 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18335 case OPC_HADD_S_df
:
18336 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18338 case OPC_DPSUB_U_df
:
18339 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18341 case OPC_HADD_U_df
:
18342 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18344 case OPC_HSUB_S_df
:
18345 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18347 case OPC_HSUB_U_df
:
18348 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18353 MIPS_INVAL("MSA instruction");
18354 generate_exception_end(ctx
, EXCP_RI
);
18357 tcg_temp_free_i32(twd
);
18358 tcg_temp_free_i32(tws
);
18359 tcg_temp_free_i32(twt
);
18360 tcg_temp_free_i32(tdf
);
/* Translate the MSA ELM-format instructions with df/n field == 0x3E:
 * CTCMSA (GPR -> MSA control register), CFCMSA (MSA control register ->
 * GPR) and MOVE.V (vector register copy).
 * (NOTE: source is a partially elided extraction; the case labels and
 * break statements inside the switch are missing from view.) */
18363 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18365 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
/* source/dest: 5-bit register indexes (GPR or MSA reg depending on op) */
18366 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18367 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18368 TCGv telm
= tcg_temp_new();
18369 TCGv_i32 tsr
= tcg_const_i32(source
);
18370 TCGv_i32 tdt
= tcg_const_i32(dest
);
18372 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
/* CTCMSA: copy GPR 'source' into MSA control register 'dest' */
18374 gen_load_gpr(telm
, source
);
18375 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
/* CFCMSA: read MSA control register 'source' into GPR 'dest' */
18378 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18379 gen_store_gpr(telm
, dest
);
/* MOVE.V: whole-vector register copy */
18382 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
/* anything else in this encoding space -> reserved instruction */
18385 MIPS_INVAL("MSA instruction");
18386 generate_exception_end(ctx
, EXCP_RI
);
18390 tcg_temp_free(telm
);
18391 tcg_temp_free_i32(tdt
);
18392 tcg_temp_free_i32(tsr
);
18395 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18398 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18399 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18400 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18402 TCGv_i32 tws
= tcg_const_i32(ws
);
18403 TCGv_i32 twd
= tcg_const_i32(wd
);
18404 TCGv_i32 tn
= tcg_const_i32(n
);
18405 TCGv_i32 tdf
= tcg_const_i32(df
);
18407 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18409 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18411 case OPC_SPLATI_df
:
18412 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18415 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18417 case OPC_COPY_S_df
:
18418 case OPC_COPY_U_df
:
18419 case OPC_INSERT_df
:
18420 #if !defined(TARGET_MIPS64)
18421 /* Double format valid only for MIPS64 */
18422 if (df
== DF_DOUBLE
) {
18423 generate_exception_end(ctx
, EXCP_RI
);
18427 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18428 case OPC_COPY_S_df
:
18429 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18431 case OPC_COPY_U_df
:
18432 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18434 case OPC_INSERT_df
:
18435 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18440 MIPS_INVAL("MSA instruction");
18441 generate_exception_end(ctx
, EXCP_RI
);
18443 tcg_temp_free_i32(twd
);
18444 tcg_temp_free_i32(tws
);
18445 tcg_temp_free_i32(tn
);
18446 tcg_temp_free_i32(tdf
);
/* Decode the combined df/n field (bits 16-21) of an MSA ELM-format
 * instruction.  The field packs both the data format and the element
 * index: leading-zero position of dfn selects the format (byte, half,
 * word, double) and the remaining low bits are the element number n.
 * dfn == 0x3E instead selects the register-move group (CTCMSA/CFCMSA/
 * MOVE.V); any other pattern is a reserved instruction.
 * (NOTE: source is a partially elided extraction; the df/n assignments
 * inside each branch are missing from view.) */
18449 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18451 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18452 uint32_t df
= 0, n
= 0;
/* 00nnnn -> byte; 100nnn -> halfword; 1100nn -> word; 11100n -> dword */
18454 if ((dfn
& 0x30) == 0x00) {
18457 } else if ((dfn
& 0x38) == 0x20) {
18460 } else if ((dfn
& 0x3c) == 0x30) {
18463 } else if ((dfn
& 0x3e) == 0x38) {
18466 } else if (dfn
== 0x3E) {
18467 /* CTCMSA, CFCMSA, MOVE.V */
18468 gen_msa_elm_3e(env
, ctx
);
/* no df/n pattern matched -> reserved instruction */
18471 generate_exception_end(ctx
, EXCP_RI
);
/* hand off to the per-data-format ELM decoder */
18475 gen_msa_elm_df(env
, ctx
, df
, n
);
18478 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18480 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18481 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18482 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18483 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18484 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18486 TCGv_i32 twd
= tcg_const_i32(wd
);
18487 TCGv_i32 tws
= tcg_const_i32(ws
);
18488 TCGv_i32 twt
= tcg_const_i32(wt
);
18489 TCGv_i32 tdf
= tcg_temp_new_i32();
18491 /* adjust df value for floating-point instruction */
18492 tcg_gen_movi_i32(tdf
, df
+ 2);
18494 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18496 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18499 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18502 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18505 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18508 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18511 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18514 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18517 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18520 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18523 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18529 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18535 tcg_gen_movi_i32(tdf
, df
+ 1);
18536 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18539 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18542 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18544 case OPC_MADD_Q_df
:
18545 tcg_gen_movi_i32(tdf
, df
+ 1);
18546 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18551 case OPC_MSUB_Q_df
:
18552 tcg_gen_movi_i32(tdf
, df
+ 1);
18553 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18556 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18559 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18562 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18565 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18568 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18571 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18574 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18577 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18580 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18583 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18586 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18589 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18592 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18594 case OPC_MULR_Q_df
:
18595 tcg_gen_movi_i32(tdf
, df
+ 1);
18596 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18599 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18601 case OPC_FMIN_A_df
:
18602 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18604 case OPC_MADDR_Q_df
:
18605 tcg_gen_movi_i32(tdf
, df
+ 1);
18606 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18612 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18614 case OPC_MSUBR_Q_df
:
18615 tcg_gen_movi_i32(tdf
, df
+ 1);
18616 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18619 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18621 case OPC_FMAX_A_df
:
18622 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18625 MIPS_INVAL("MSA instruction");
18626 generate_exception_end(ctx
, EXCP_RI
);
18630 tcg_temp_free_i32(twd
);
18631 tcg_temp_free_i32(tws
);
18632 tcg_temp_free_i32(twt
);
18633 tcg_temp_free_i32(tdf
);
18636 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18638 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18639 (op & (0x7 << 18)))
18640 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18641 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18642 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18643 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18644 TCGv_i32 twd
= tcg_const_i32(wd
);
18645 TCGv_i32 tws
= tcg_const_i32(ws
);
18646 TCGv_i32 twt
= tcg_const_i32(wt
);
18647 TCGv_i32 tdf
= tcg_const_i32(df
);
18649 switch (MASK_MSA_2R(ctx
->opcode
)) {
18651 #if !defined(TARGET_MIPS64)
18652 /* Double format valid only for MIPS64 */
18653 if (df
== DF_DOUBLE
) {
18654 generate_exception_end(ctx
, EXCP_RI
);
18658 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18661 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18664 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18667 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18670 MIPS_INVAL("MSA instruction");
18671 generate_exception_end(ctx
, EXCP_RI
);
18675 tcg_temp_free_i32(twd
);
18676 tcg_temp_free_i32(tws
);
18677 tcg_temp_free_i32(twt
);
18678 tcg_temp_free_i32(tdf
);
18681 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18683 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18684 (op & (0xf << 17)))
18685 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18686 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18687 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18688 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18689 TCGv_i32 twd
= tcg_const_i32(wd
);
18690 TCGv_i32 tws
= tcg_const_i32(ws
);
18691 TCGv_i32 twt
= tcg_const_i32(wt
);
18692 /* adjust df value for floating-point instruction */
18693 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18695 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18696 case OPC_FCLASS_df
:
18697 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18699 case OPC_FTRUNC_S_df
:
18700 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18702 case OPC_FTRUNC_U_df
:
18703 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18706 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18708 case OPC_FRSQRT_df
:
18709 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18712 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18715 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18718 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18720 case OPC_FEXUPL_df
:
18721 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18723 case OPC_FEXUPR_df
:
18724 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18727 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18730 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18732 case OPC_FTINT_S_df
:
18733 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18735 case OPC_FTINT_U_df
:
18736 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18738 case OPC_FFINT_S_df
:
18739 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18741 case OPC_FFINT_U_df
:
18742 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18746 tcg_temp_free_i32(twd
);
18747 tcg_temp_free_i32(tws
);
18748 tcg_temp_free_i32(twt
);
18749 tcg_temp_free_i32(tdf
);
18752 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18754 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18755 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18756 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18757 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18758 TCGv_i32 twd
= tcg_const_i32(wd
);
18759 TCGv_i32 tws
= tcg_const_i32(ws
);
18760 TCGv_i32 twt
= tcg_const_i32(wt
);
18762 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18764 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18767 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18770 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18773 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18776 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18779 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18782 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18785 MIPS_INVAL("MSA instruction");
18786 generate_exception_end(ctx
, EXCP_RI
);
18790 tcg_temp_free_i32(twd
);
18791 tcg_temp_free_i32(tws
);
18792 tcg_temp_free_i32(twt
);
18795 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18797 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18805 gen_msa_vec_v(env
, ctx
);
18808 gen_msa_2r(env
, ctx
);
18811 gen_msa_2rf(env
, ctx
);
18814 MIPS_INVAL("MSA instruction");
18815 generate_exception_end(ctx
, EXCP_RI
);
18820 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18822 uint32_t opcode
= ctx
->opcode
;
18823 check_insn(ctx
, ASE_MSA
);
18824 check_msa_access(ctx
);
18826 switch (MASK_MSA_MINOR(opcode
)) {
18827 case OPC_MSA_I8_00
:
18828 case OPC_MSA_I8_01
:
18829 case OPC_MSA_I8_02
:
18830 gen_msa_i8(env
, ctx
);
18832 case OPC_MSA_I5_06
:
18833 case OPC_MSA_I5_07
:
18834 gen_msa_i5(env
, ctx
);
18836 case OPC_MSA_BIT_09
:
18837 case OPC_MSA_BIT_0A
:
18838 gen_msa_bit(env
, ctx
);
18840 case OPC_MSA_3R_0D
:
18841 case OPC_MSA_3R_0E
:
18842 case OPC_MSA_3R_0F
:
18843 case OPC_MSA_3R_10
:
18844 case OPC_MSA_3R_11
:
18845 case OPC_MSA_3R_12
:
18846 case OPC_MSA_3R_13
:
18847 case OPC_MSA_3R_14
:
18848 case OPC_MSA_3R_15
:
18849 gen_msa_3r(env
, ctx
);
18852 gen_msa_elm(env
, ctx
);
18854 case OPC_MSA_3RF_1A
:
18855 case OPC_MSA_3RF_1B
:
18856 case OPC_MSA_3RF_1C
:
18857 gen_msa_3rf(env
, ctx
);
18860 gen_msa_vec(env
, ctx
);
18871 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18872 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18873 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18874 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18876 TCGv_i32 twd
= tcg_const_i32(wd
);
18877 TCGv taddr
= tcg_temp_new();
18878 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18880 switch (MASK_MSA_MINOR(opcode
)) {
18882 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18885 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18888 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18891 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18894 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18897 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18900 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18903 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18907 tcg_temp_free_i32(twd
);
18908 tcg_temp_free(taddr
);
18912 MIPS_INVAL("MSA instruction");
18913 generate_exception_end(ctx
, EXCP_RI
);
18919 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18922 int rs
, rt
, rd
, sa
;
18926 /* make sure instructions are on a word boundary */
18927 if (ctx
->pc
& 0x3) {
18928 env
->CP0_BadVAddr
= ctx
->pc
;
18929 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18933 /* Handle blikely not taken case */
18934 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18935 TCGLabel
*l1
= gen_new_label();
18937 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18938 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18939 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
18943 op
= MASK_OP_MAJOR(ctx
->opcode
);
18944 rs
= (ctx
->opcode
>> 21) & 0x1f;
18945 rt
= (ctx
->opcode
>> 16) & 0x1f;
18946 rd
= (ctx
->opcode
>> 11) & 0x1f;
18947 sa
= (ctx
->opcode
>> 6) & 0x1f;
18948 imm
= (int16_t)ctx
->opcode
;
18951 decode_opc_special(env
, ctx
);
18954 decode_opc_special2_legacy(env
, ctx
);
18957 decode_opc_special3(env
, ctx
);
18960 op1
= MASK_REGIMM(ctx
->opcode
);
18962 case OPC_BLTZL
: /* REGIMM branches */
18966 check_insn(ctx
, ISA_MIPS2
);
18967 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18971 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18975 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18977 /* OPC_NAL, OPC_BAL */
18978 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18980 generate_exception_end(ctx
, EXCP_RI
);
18983 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18986 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18988 check_insn(ctx
, ISA_MIPS2
);
18989 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18990 gen_trap(ctx
, op1
, rs
, -1, imm
);
18993 check_insn(ctx
, ISA_MIPS32R6
);
18994 generate_exception_end(ctx
, EXCP_RI
);
18997 check_insn(ctx
, ISA_MIPS32R2
);
18998 /* Break the TB to be able to sync copied instructions
19000 ctx
->bstate
= BS_STOP
;
19002 case OPC_BPOSGE32
: /* MIPS DSP branch */
19003 #if defined(TARGET_MIPS64)
19007 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19009 #if defined(TARGET_MIPS64)
19011 check_insn(ctx
, ISA_MIPS32R6
);
19012 check_mips_64(ctx
);
19014 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19018 check_insn(ctx
, ISA_MIPS32R6
);
19019 check_mips_64(ctx
);
19021 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19025 default: /* Invalid */
19026 MIPS_INVAL("regimm");
19027 generate_exception_end(ctx
, EXCP_RI
);
19032 check_cp0_enabled(ctx
);
19033 op1
= MASK_CP0(ctx
->opcode
);
19041 #if defined(TARGET_MIPS64)
19045 #ifndef CONFIG_USER_ONLY
19046 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19047 #endif /* !CONFIG_USER_ONLY */
19049 case OPC_C0_FIRST
... OPC_C0_LAST
:
19050 #ifndef CONFIG_USER_ONLY
19051 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19052 #endif /* !CONFIG_USER_ONLY */
19055 #ifndef CONFIG_USER_ONLY
19058 TCGv t0
= tcg_temp_new();
19060 op2
= MASK_MFMC0(ctx
->opcode
);
19063 check_insn(ctx
, ASE_MT
);
19064 gen_helper_dmt(t0
);
19065 gen_store_gpr(t0
, rt
);
19068 check_insn(ctx
, ASE_MT
);
19069 gen_helper_emt(t0
);
19070 gen_store_gpr(t0
, rt
);
19073 check_insn(ctx
, ASE_MT
);
19074 gen_helper_dvpe(t0
, cpu_env
);
19075 gen_store_gpr(t0
, rt
);
19078 check_insn(ctx
, ASE_MT
);
19079 gen_helper_evpe(t0
, cpu_env
);
19080 gen_store_gpr(t0
, rt
);
19083 check_insn(ctx
, ISA_MIPS32R2
);
19084 save_cpu_state(ctx
, 1);
19085 gen_helper_di(t0
, cpu_env
);
19086 gen_store_gpr(t0
, rt
);
19087 /* Stop translation as we may have switched
19088 the execution mode. */
19089 ctx
->bstate
= BS_STOP
;
19092 check_insn(ctx
, ISA_MIPS32R2
);
19093 save_cpu_state(ctx
, 1);
19094 gen_helper_ei(t0
, cpu_env
);
19095 gen_store_gpr(t0
, rt
);
19096 /* Stop translation as we may have switched
19097 the execution mode. */
19098 ctx
->bstate
= BS_STOP
;
19100 default: /* Invalid */
19101 MIPS_INVAL("mfmc0");
19102 generate_exception_end(ctx
, EXCP_RI
);
19107 #endif /* !CONFIG_USER_ONLY */
19110 check_insn(ctx
, ISA_MIPS32R2
);
19111 gen_load_srsgpr(rt
, rd
);
19114 check_insn(ctx
, ISA_MIPS32R2
);
19115 gen_store_srsgpr(rt
, rd
);
19119 generate_exception_end(ctx
, EXCP_RI
);
19123 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19124 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19125 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19126 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19129 /* Arithmetic with immediate opcode */
19130 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19134 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19136 case OPC_SLTI
: /* Set on less than with immediate opcode */
19138 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19140 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19141 case OPC_LUI
: /* OPC_AUI */
19144 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19146 case OPC_J
... OPC_JAL
: /* Jump */
19147 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19148 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19151 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19152 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19154 generate_exception_end(ctx
, EXCP_RI
);
19157 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19158 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19161 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19164 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19165 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19167 generate_exception_end(ctx
, EXCP_RI
);
19170 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19171 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19174 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19177 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19180 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19182 check_insn(ctx
, ISA_MIPS32R6
);
19183 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19184 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19187 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19190 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19192 check_insn(ctx
, ISA_MIPS32R6
);
19193 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19194 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19199 check_insn(ctx
, ISA_MIPS2
);
19200 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19204 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19206 case OPC_LL
: /* Load and stores */
19207 check_insn(ctx
, ISA_MIPS2
);
19211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19213 case OPC_LB
... OPC_LH
:
19214 case OPC_LW
... OPC_LHU
:
19215 gen_ld(ctx
, op
, rt
, rs
, imm
);
19219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19221 case OPC_SB
... OPC_SH
:
19223 gen_st(ctx
, op
, rt
, rs
, imm
);
19226 check_insn(ctx
, ISA_MIPS2
);
19227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19228 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19232 check_cp0_enabled(ctx
);
19233 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19234 /* Treat as NOP. */
19237 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19238 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19239 /* Treat as NOP. */
19242 /* Floating point (COP1). */
19247 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19251 op1
= MASK_CP1(ctx
->opcode
);
19256 check_cp1_enabled(ctx
);
19257 check_insn(ctx
, ISA_MIPS32R2
);
19262 check_cp1_enabled(ctx
);
19263 gen_cp1(ctx
, op1
, rt
, rd
);
19265 #if defined(TARGET_MIPS64)
19268 check_cp1_enabled(ctx
);
19269 check_insn(ctx
, ISA_MIPS3
);
19270 check_mips_64(ctx
);
19271 gen_cp1(ctx
, op1
, rt
, rd
);
19274 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19275 check_cp1_enabled(ctx
);
19276 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19278 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19283 check_insn(ctx
, ASE_MIPS3D
);
19284 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19285 (rt
>> 2) & 0x7, imm
<< 2);
19289 check_cp1_enabled(ctx
);
19290 check_insn(ctx
, ISA_MIPS32R6
);
19291 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19295 check_cp1_enabled(ctx
);
19296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19298 check_insn(ctx
, ASE_MIPS3D
);
19301 check_cp1_enabled(ctx
);
19302 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19303 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19304 (rt
>> 2) & 0x7, imm
<< 2);
19311 check_cp1_enabled(ctx
);
19312 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19318 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19319 check_cp1_enabled(ctx
);
19320 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19322 case R6_OPC_CMP_AF_S
:
19323 case R6_OPC_CMP_UN_S
:
19324 case R6_OPC_CMP_EQ_S
:
19325 case R6_OPC_CMP_UEQ_S
:
19326 case R6_OPC_CMP_LT_S
:
19327 case R6_OPC_CMP_ULT_S
:
19328 case R6_OPC_CMP_LE_S
:
19329 case R6_OPC_CMP_ULE_S
:
19330 case R6_OPC_CMP_SAF_S
:
19331 case R6_OPC_CMP_SUN_S
:
19332 case R6_OPC_CMP_SEQ_S
:
19333 case R6_OPC_CMP_SEUQ_S
:
19334 case R6_OPC_CMP_SLT_S
:
19335 case R6_OPC_CMP_SULT_S
:
19336 case R6_OPC_CMP_SLE_S
:
19337 case R6_OPC_CMP_SULE_S
:
19338 case R6_OPC_CMP_OR_S
:
19339 case R6_OPC_CMP_UNE_S
:
19340 case R6_OPC_CMP_NE_S
:
19341 case R6_OPC_CMP_SOR_S
:
19342 case R6_OPC_CMP_SUNE_S
:
19343 case R6_OPC_CMP_SNE_S
:
19344 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19346 case R6_OPC_CMP_AF_D
:
19347 case R6_OPC_CMP_UN_D
:
19348 case R6_OPC_CMP_EQ_D
:
19349 case R6_OPC_CMP_UEQ_D
:
19350 case R6_OPC_CMP_LT_D
:
19351 case R6_OPC_CMP_ULT_D
:
19352 case R6_OPC_CMP_LE_D
:
19353 case R6_OPC_CMP_ULE_D
:
19354 case R6_OPC_CMP_SAF_D
:
19355 case R6_OPC_CMP_SUN_D
:
19356 case R6_OPC_CMP_SEQ_D
:
19357 case R6_OPC_CMP_SEUQ_D
:
19358 case R6_OPC_CMP_SLT_D
:
19359 case R6_OPC_CMP_SULT_D
:
19360 case R6_OPC_CMP_SLE_D
:
19361 case R6_OPC_CMP_SULE_D
:
19362 case R6_OPC_CMP_OR_D
:
19363 case R6_OPC_CMP_UNE_D
:
19364 case R6_OPC_CMP_NE_D
:
19365 case R6_OPC_CMP_SOR_D
:
19366 case R6_OPC_CMP_SUNE_D
:
19367 case R6_OPC_CMP_SNE_D
:
19368 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19371 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19372 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19377 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19392 check_insn(ctx
, ASE_MSA
);
19393 gen_msa_branch(env
, ctx
, op1
);
19397 generate_exception_end(ctx
, EXCP_RI
);
19402 /* Compact branches [R6] and COP2 [non-R6] */
19403 case OPC_BC
: /* OPC_LWC2 */
19404 case OPC_BALC
: /* OPC_SWC2 */
19405 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19406 /* OPC_BC, OPC_BALC */
19407 gen_compute_compact_branch(ctx
, op
, 0, 0,
19408 sextract32(ctx
->opcode
<< 2, 0, 28));
19410 /* OPC_LWC2, OPC_SWC2 */
19411 /* COP2: Not implemented. */
19412 generate_exception_err(ctx
, EXCP_CpU
, 2);
19415 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19416 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19417 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19419 /* OPC_BEQZC, OPC_BNEZC */
19420 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19421 sextract32(ctx
->opcode
<< 2, 0, 23));
19423 /* OPC_JIC, OPC_JIALC */
19424 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19427 /* OPC_LWC2, OPC_SWC2 */
19428 /* COP2: Not implemented. */
19429 generate_exception_err(ctx
, EXCP_CpU
, 2);
19433 check_insn(ctx
, INSN_LOONGSON2F
);
19434 /* Note that these instructions use different fields. */
19435 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19439 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19440 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19441 check_cp1_enabled(ctx
);
19442 op1
= MASK_CP3(ctx
->opcode
);
19446 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19452 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19453 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19456 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19457 /* Treat as NOP. */
19460 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19474 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19475 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19479 generate_exception_end(ctx
, EXCP_RI
);
19483 generate_exception_err(ctx
, EXCP_CpU
, 1);
19487 #if defined(TARGET_MIPS64)
19488 /* MIPS64 opcodes */
19489 case OPC_LDL
... OPC_LDR
:
19491 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19495 check_insn(ctx
, ISA_MIPS3
);
19496 check_mips_64(ctx
);
19497 gen_ld(ctx
, op
, rt
, rs
, imm
);
19499 case OPC_SDL
... OPC_SDR
:
19500 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19503 check_insn(ctx
, ISA_MIPS3
);
19504 check_mips_64(ctx
);
19505 gen_st(ctx
, op
, rt
, rs
, imm
);
19508 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19509 check_insn(ctx
, ISA_MIPS3
);
19510 check_mips_64(ctx
);
19511 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19513 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19514 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19515 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19516 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19519 check_insn(ctx
, ISA_MIPS3
);
19520 check_mips_64(ctx
);
19521 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19525 check_insn(ctx
, ISA_MIPS3
);
19526 check_mips_64(ctx
);
19527 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19530 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19531 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19532 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19534 MIPS_INVAL("major opcode");
19535 generate_exception_end(ctx
, EXCP_RI
);
19539 case OPC_DAUI
: /* OPC_JALX */
19540 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19541 #if defined(TARGET_MIPS64)
19543 check_mips_64(ctx
);
19545 generate_exception(ctx
, EXCP_RI
);
19546 } else if (rt
!= 0) {
19547 TCGv t0
= tcg_temp_new();
19548 gen_load_gpr(t0
, rs
);
19549 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19553 generate_exception_end(ctx
, EXCP_RI
);
19554 MIPS_INVAL("major opcode");
19558 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19559 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19560 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19563 case OPC_MSA
: /* OPC_MDMX */
19564 /* MDMX: Not implemented. */
19568 check_insn(ctx
, ISA_MIPS32R6
);
19569 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19571 default: /* Invalid */
19572 MIPS_INVAL("major opcode");
19573 generate_exception_end(ctx
, EXCP_RI
);
19578 void gen_intermediate_code(CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19580 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19581 CPUState
*cs
= CPU(cpu
);
19583 target_ulong pc_start
;
19584 target_ulong next_page_start
;
19591 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19594 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19595 ctx
.insn_flags
= env
->insn_flags
;
19596 ctx
.CP0_Config1
= env
->CP0_Config1
;
19598 ctx
.bstate
= BS_NONE
;
19600 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19601 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19602 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19603 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19604 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19605 ctx
.PAMask
= env
->PAMask
;
19606 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19607 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19608 /* Restore delay slot state from the tb context. */
19609 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19610 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19611 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19612 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19613 restore_cpu_state(env
, &ctx
);
19614 #ifdef CONFIG_USER_ONLY
19615 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19617 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19619 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19620 MO_UNALN
: MO_ALIGN
;
19622 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19623 if (max_insns
== 0) {
19624 max_insns
= CF_COUNT_MASK
;
19626 if (max_insns
> TCG_MAX_INSNS
) {
19627 max_insns
= TCG_MAX_INSNS
;
19630 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19632 while (ctx
.bstate
== BS_NONE
) {
19633 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
19636 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
19637 save_cpu_state(&ctx
, 1);
19638 ctx
.bstate
= BS_BRANCH
;
19639 gen_helper_raise_exception_debug(cpu_env
);
19640 /* The address covered by the breakpoint must be included in
19641 [tb->pc, tb->pc + tb->size) in order to for it to be
19642 properly cleared -- thus we increment the PC here so that
19643 the logic setting tb->size below does the right thing. */
19645 goto done_generating
;
19648 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
19652 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19653 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19654 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19656 decode_opc(env
, &ctx
);
19657 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19658 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19659 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19660 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19661 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19662 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19664 generate_exception_end(&ctx
, EXCP_RI
);
19668 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19669 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19670 MIPS_HFLAG_FBNSLOT
))) {
19671 /* force to generate branch as there is neither delay nor
19675 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19676 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19677 /* Force to generate branch as microMIPS R6 doesn't restrict
19678 branches in the forbidden slot. */
19683 gen_branch(&ctx
, insn_bytes
);
19685 ctx
.pc
+= insn_bytes
;
19687 /* Execute a branch and its delay slot as a single instruction.
19688 This is what GDB expects and is consistent with what the
19689 hardware does (e.g. if a delay slot instruction faults, the
19690 reported PC is the PC of the branch). */
19691 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
19695 if (ctx
.pc
>= next_page_start
) {
19699 if (tcg_op_buf_full()) {
19703 if (num_insns
>= max_insns
)
19709 if (tb
->cflags
& CF_LAST_IO
) {
19712 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
19713 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
19714 gen_helper_raise_exception_debug(cpu_env
);
19716 switch (ctx
.bstate
) {
19718 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19721 save_cpu_state(&ctx
, 0);
19722 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19725 tcg_gen_exit_tb(0);
19733 gen_tb_end(tb
, num_insns
);
19735 tb
->size
= ctx
.pc
- pc_start
;
19736 tb
->icount
= num_insns
;
19740 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
19741 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
19742 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
19748 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
19752 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
19754 #define printfpr(fp) \
19757 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19758 " fd:%13g fs:%13g psu: %13g\n", \
19759 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
19760 (double)(fp)->fd, \
19761 (double)(fp)->fs[FP_ENDIAN_IDX], \
19762 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
19765 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
19766 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
19767 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19768 " fd:%13g fs:%13g psu:%13g\n", \
19769 tmp.w[FP_ENDIAN_IDX], tmp.d, \
19771 (double)tmp.fs[FP_ENDIAN_IDX], \
19772 (double)tmp.fs[!FP_ENDIAN_IDX]); \
19777 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
19778 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
19779 get_float_exception_flags(&env
->active_fpu
.fp_status
));
19780 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
19781 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
19782 printfpr(&env
->active_fpu
.fpr
[i
]);
19788 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
19791 MIPSCPU
*cpu
= MIPS_CPU(cs
);
19792 CPUMIPSState
*env
= &cpu
->env
;
19795 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
19796 " LO=0x" TARGET_FMT_lx
" ds %04x "
19797 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
19798 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
19799 env
->hflags
, env
->btarget
, env
->bcond
);
19800 for (i
= 0; i
< 32; i
++) {
19802 cpu_fprintf(f
, "GPR%02d:", i
);
19803 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
19805 cpu_fprintf(f
, "\n");
19808 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
19809 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
19810 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
19812 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
19813 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
19814 env
->CP0_Config2
, env
->CP0_Config3
);
19815 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
19816 env
->CP0_Config4
, env
->CP0_Config5
);
19817 if (env
->hflags
& MIPS_HFLAG_FPU
)
19818 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
19821 void mips_tcg_init(void)
19826 /* Initialize various static tables. */
19830 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
19831 TCGV_UNUSED(cpu_gpr
[0]);
19832 for (i
= 1; i
< 32; i
++)
19833 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
19834 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
19837 for (i
= 0; i
< 32; i
++) {
19838 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
19840 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2]);
19841 /* The scalar floating-point unit (FPU) registers are mapped on
19842 * the MSA vector registers. */
19843 fpu_f64
[i
] = msa_wr_d
[i
* 2];
19844 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
19845 msa_wr_d
[i
* 2 + 1] =
19846 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2 + 1]);
19849 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
19850 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
19851 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
19852 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
19853 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
19855 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
19856 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
19859 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
19860 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
19862 bcond
= tcg_global_mem_new(TCG_AREG0
,
19863 offsetof(CPUMIPSState
, bcond
), "bcond");
19864 btarget
= tcg_global_mem_new(TCG_AREG0
,
19865 offsetof(CPUMIPSState
, btarget
), "btarget");
19866 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
19867 offsetof(CPUMIPSState
, hflags
), "hflags");
19869 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
19870 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
19872 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
19873 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
19879 #include "translate_init.c"
19881 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
19885 const mips_def_t
*def
;
19887 def
= cpu_mips_find_by_name(cpu_model
);
19890 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
19892 env
->cpu_model
= def
;
19894 #ifndef CONFIG_USER_ONLY
19895 mmu_init(env
, def
);
19897 fpu_init(env
, def
);
19898 mvp_init(env
, def
);
19900 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
19905 void cpu_state_reset(CPUMIPSState
*env
)
19907 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19908 CPUState
*cs
= CPU(cpu
);
19910 /* Reset registers to their default values */
19911 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
19912 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
19913 #ifdef TARGET_WORDS_BIGENDIAN
19914 env
->CP0_Config0
|= (1 << CP0C0_BE
);
19916 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
19917 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
19918 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
19919 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
19920 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
19921 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
19922 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
19923 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
19924 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
19925 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
19926 << env
->cpu_model
->CP0_LLAddr_shift
;
19927 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
19928 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
19929 env
->CCRes
= env
->cpu_model
->CCRes
;
19930 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
19931 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
19932 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
19933 env
->current_tc
= 0;
19934 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
19935 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
19936 #if defined(TARGET_MIPS64)
19937 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
19938 env
->SEGMask
|= 3ULL << 62;
19941 env
->PABITS
= env
->cpu_model
->PABITS
;
19942 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
19943 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
19944 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
19945 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
19946 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
19947 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
19948 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
19949 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
19950 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
19951 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
19952 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
19953 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
19954 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
19955 env
->msair
= env
->cpu_model
->MSAIR
;
19956 env
->insn_flags
= env
->cpu_model
->insn_flags
;
19958 #if defined(CONFIG_USER_ONLY)
19959 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
19960 # ifdef TARGET_MIPS64
19961 /* Enable 64-bit register mode. */
19962 env
->CP0_Status
|= (1 << CP0St_PX
);
19964 # ifdef TARGET_ABI_MIPSN64
19965 /* Enable 64-bit address mode. */
19966 env
->CP0_Status
|= (1 << CP0St_UX
);
19968 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
19969 hardware registers. */
19970 env
->CP0_HWREna
|= 0x0000000F;
19971 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
19972 env
->CP0_Status
|= (1 << CP0St_CU1
);
19974 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
19975 env
->CP0_Status
|= (1 << CP0St_MX
);
19977 # if defined(TARGET_MIPS64)
19978 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
19979 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
19980 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
19981 env
->CP0_Status
|= (1 << CP0St_FR
);
19985 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
19986 /* If the exception was raised from a delay slot,
19987 come back to the jump. */
19988 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
19989 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
19991 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
19993 env
->active_tc
.PC
= (int32_t)0xBFC00000;
19994 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
19995 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
19996 env
->CP0_Wired
= 0;
19997 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
19998 if (kvm_enabled()) {
19999 env
->CP0_EBase
|= 0x40000000;
20001 env
->CP0_EBase
|= 0x80000000;
20003 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20004 /* vectored interrupts not implemented, timer on int 7,
20005 no performance counters. */
20006 env
->CP0_IntCtl
= 0xe0000000;
20010 for (i
= 0; i
< 7; i
++) {
20011 env
->CP0_WatchLo
[i
] = 0;
20012 env
->CP0_WatchHi
[i
] = 0x80000000;
20014 env
->CP0_WatchLo
[7] = 0;
20015 env
->CP0_WatchHi
[7] = 0;
20017 /* Count register increments in debug mode, EJTAG version 1 */
20018 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20020 cpu_mips_store_count(env
, 1);
20022 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20025 /* Only TC0 on VPE 0 starts as active. */
20026 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20027 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20028 env
->tcs
[i
].CP0_TCHalt
= 1;
20030 env
->active_tc
.CP0_TCHalt
= 1;
20033 if (cs
->cpu_index
== 0) {
20034 /* VPE0 starts up enabled. */
20035 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20036 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20038 /* TC0 starts up unhalted. */
20040 env
->active_tc
.CP0_TCHalt
= 0;
20041 env
->tcs
[0].CP0_TCHalt
= 0;
20042 /* With thread 0 active. */
20043 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20044 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20048 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20049 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20050 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20051 env
->CP0_Status
|= (1 << CP0St_FR
);
20055 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20059 compute_hflags(env
);
20060 restore_rounding_mode(env
);
20061 restore_flush_mode(env
);
20062 restore_pamask(env
);
20063 cs
->exception_index
= EXCP_NONE
;
20065 if (semihosting_get_argc()) {
20066 /* UHI interface can be used to obtain argc and argv */
20067 env
->active_tc
.gpr
[4] = -1;
20071 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20072 target_ulong
*data
)
20074 env
->active_tc
.PC
= data
[0];
20075 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20076 env
->hflags
|= data
[1];
20077 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20078 case MIPS_HFLAG_BR
:
20080 case MIPS_HFLAG_BC
:
20081 case MIPS_HFLAG_BL
:
20083 env
->btarget
= data
[2];