/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "disas/disas.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
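
/*
 * Decode note (added for clarity): the major opcode sits in bits 31..26 of
 * the instruction word, so MASK_OP_MAJOR() keeps only those bits and the
 * result can be compared directly against the OPC_* values below, e.g.
 *
 *     switch (MASK_OP_MAJOR(ctx->opcode)) {
 *     case OPC_SPECIAL:   decode further via MASK_SPECIAL();  break;
 *     case OPC_ADDI:      handle immediate arithmetic;        break;
 *     ...
 *     }
 *
 * Secondary tables (SPECIAL, REGIMM, CP0, ...) refine the decode with the
 * additional MASK_*() macros defined further down.
 */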

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),

    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};

/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};

/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT | (2 << 6),
    R6_OPC_MUH   = OPC_MULT | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV | (2 << 6),
    R6_OPC_MOD   = OPC_DIV | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL  = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH  = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV  = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD  = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ   = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO   = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ  = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO  = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA      = 0x05 | OPC_SPECIAL,
    OPC_DLSA     = 0x15 | OPC_SPECIAL,
};
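
/*
 * Note (added for clarity): the R6 variants above reuse the legacy
 * MULT/MULTU/DIV/DIVU/DMULT/... function codes and are distinguished only
 * by the value (2 or 3) placed in bits 10..6, which is why MASK_R6_MULDIV()
 * also folds bits 10..0 of the instruction into the comparison key.
 */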

/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};

/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD        = 0x00 | OPC_SPECIAL2,
    OPC_MADDU       = 0x01 | OPC_SPECIAL2,
    OPC_MUL         = 0x02 | OPC_SPECIAL2,
    OPC_MSUB        = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU       = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ         = 0x20 | OPC_SPECIAL2,
    OPC_CLO         = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ        = 0x24 | OPC_SPECIAL2,
    OPC_DCLO        = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP       = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT            = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM          = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU          = 0x02 | OPC_SPECIAL3,
    OPC_DEXT           = 0x03 | OPC_SPECIAL3,
    OPC_INS            = 0x04 | OPC_SPECIAL3,
    OPC_DINSM          = 0x05 | OPC_SPECIAL3,
    OPC_DINSU          = 0x06 | OPC_SPECIAL3,
    OPC_DINS           = 0x07 | OPC_SPECIAL3,
    OPC_FORK           = 0x08 | OPC_SPECIAL3,
    OPC_YIELD          = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL          = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL         = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR          = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E      = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E     = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E       = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E      = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E     = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E    = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E      = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E     = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E       = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E      = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E      = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E     = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};

#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};

#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};

#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};

#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};

#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};

#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};

#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};

#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};

#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};

#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};

#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};

#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};

#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};

#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};

#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};

#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};

#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};

#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};

#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};

/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT  = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT  = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI   = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI   = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR    = 0x01 | OPC_C0,
    OPC_TLBWI   = 0x02 | OPC_C0,
    OPC_TLBINV  = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR   = 0x06 | OPC_C0,
    OPC_TLBP    = 0x08 | OPC_C0,
    OPC_RFE     = 0x10 | OPC_C0,
    OPC_ERET    = 0x18 | OPC_C0,
    OPC_DERET   = 0x1F | OPC_C0,
    OPC_WAIT    = 0x20 | OPC_C0,
};

/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1    = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1   = (0x01 << 21) | OPC_CP1,
    OPC_CFC1    = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1   = (0x03 << 21) | OPC_CP1,
    OPC_MTC1    = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1   = (0x05 << 21) | OPC_CP1,
    OPC_CTC1    = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1   = (0x07 << 21) | OPC_CP1,
    OPC_BC1     = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V    = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V   = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT   = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT   = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT   = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT   = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT   = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT   = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT  = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ  = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ  = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B    = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H    = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W    = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D    = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B   = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H   = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W   = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D   = (0x1F << 21) | OPC_CP1,
};

#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F  = (0x00 << 16) | OPC_BC1,
    OPC_BC1T  = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};

#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2   = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2  = (0x01 << 21) | OPC_CP2,
    OPC_CFC2   = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2  = (0x03 << 21) | OPC_CP2,
    OPC_MTC2   = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2  = (0x05 << 21) | OPC_CP2,
    OPC_CTC2   = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2  = (0x07 << 21) | OPC_CP2,
    OPC_BC2    = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};

#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};

#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};

#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00  = 0x00 | OPC_MSA,
    OPC_MSA_I8_01  = 0x01 | OPC_MSA,
    OPC_MSA_I8_02  = 0x02 | OPC_MSA,
    OPC_MSA_I5_06  = 0x06 | OPC_MSA,
    OPC_MSA_I5_07  = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D  = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E  = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F  = 0x0F | OPC_MSA,
    OPC_MSA_3R_10  = 0x10 | OPC_MSA,
    OPC_MSA_3R_11  = 0x11 | OPC_MSA,
    OPC_MSA_3R_12  = 0x12 | OPC_MSA,
    OPC_MSA_3R_13  = 0x13 | OPC_MSA,
    OPC_MSA_3R_14  = 0x14 | OPC_MSA,
    OPC_MSA_3R_15  = 0x15 | OPC_MSA,
    OPC_MSA_ELM    = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC    = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df  = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df   = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df  = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df    = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B  = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B   = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B   = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B  = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H   = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B  = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W   = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B  = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V  = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V   = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V  = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V  = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V  = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R  = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df   = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA    = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA    = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V    = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df  = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df    = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df    = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df    = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df    = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df    = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df    = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df    = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df   = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df   = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df    = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df    = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df    = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df   = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df   = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df   = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df   = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df  = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df    = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df  = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df   = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df   = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df    = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df   = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df    = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df    = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df    = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df     = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df   = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df   = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df    = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df    = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df    = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df  = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df   = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df  = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df    = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df    = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df   = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df  = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df   = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df  = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df   = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df  = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df   = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df  = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df  = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df  = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df  = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df  = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};

/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
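
/*
 * Note (added for clarity): these TCG globals are not plain C variables
 * holding guest state; they are handles created once at translator
 * initialisation (later in this file) that alias fields of CPUMIPSState,
 * so generated code reads and writes the CPU state structure directly.
 */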

#include "exec/gen-icount.h"
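
/*
 * Added note: the gen_helper_*e*i wrappers below exist because many MIPS
 * helpers take a small compile-time constant (an opcode field, a register
 * selector, ...) as their last argument.  Each wrapper boxes that constant
 * into a temporary TCGv_i32, forwards it to the named helper together with
 * cpu_env, and then frees the temporary.  As the bodies below show, the
 * name encodes <number of returned values>e<number of TCGv arguments>i,
 * with the implicit cpu_env plus one trailing immediate.
 */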

#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};

static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
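/* Lazy CPU state synchronisation: the translator tracks PC and hflags in
   the DisasContext and the functions below only write them back to the
   architectural state when they have actually changed, or when an
   exception path needs the precise values. */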
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
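/* FCSR condition codes are not contiguous: CC0 lives in bit 23 of the FCSR
   while CC1..CC7 occupy bits 25..31, hence the special case below. */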
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}
/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
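/* Release 6 drops the eight FP condition codes: CMP.cond.fmt writes an
   all-ones/all-zeroes mask into the destination FPR instead, which is what
   the gen_r6_cmp_* generators below implement. */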
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                              \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,        \
                                      int ft, int fs, int fd)           \
{                                                                       \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                      \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                      \
    if (ifmt == FMT_D) {                                                \
        check_cp1_registers(ctx, fs | ft | fd);                         \
    }                                                                   \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                \
    switch (n) {                                                        \
    case  0:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case  1:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case  2:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case  3:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case  4:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case  5:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case  6:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case  7:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case  8:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case  9:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 10:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 11:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1);     \
        break;                                                          \
    case 12:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 13:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1);     \
        break;                                                          \
    case 14:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 15:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1);     \
        break;                                                          \
    case 17:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case 18:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 19:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);       \
        break;                                                          \
    case 25:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    case 26:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1);     \
        break;                                                          \
    case 27:                                                            \
        gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);      \
        break;                                                          \
    default:                                                            \
        abort();                                                        \
    }                                                                   \
    STORE;                                                              \
    tcg_temp_free_i ## bits (fp0);                                      \
    tcg_temp_free_i ## bits (fp1);                                      \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                             \
    TCGv t0 = tcg_temp_new();                                                 \
    TCGLabel *l1 = gen_new_label();                                           \
    TCGLabel *l2 = gen_new_label();                                           \
                                                                              \
    tcg_gen_andi_tl(t0, arg2, almask);                                        \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                               \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));       \
    generate_exception(ctx, EXCP_AdES);                                       \
    gen_set_label(l1);                                                        \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));               \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                             \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));                \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));           \
    generate_exception_end(ctx, EXCP_SC);                                     \
    gen_set_label(l2);                                                        \
    tcg_gen_movi_tl(t0, 0);                                                   \
    gen_store_gpr(t0, rt);                                                    \
    tcg_temp_free(t0);                                                        \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                             \
    TCGv t0 = tcg_temp_new();                                                 \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                      \
    gen_store_gpr(t0, rt);                                                    \
    tcg_temp_free(t0);                                                        \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
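/* Base PC for PC-relative loads (LWPC/LDPC): when the current instruction
   sits in a branch delay slot, the delay slot size is subtracted so that
   the branch's own PC is used before the low bits are masked off. */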
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
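/* The unaligned LWL/LWR/LDL/LDR cases below first issue a byte load so
   that any page fault is taken on the unaligned address, then re-read the
   aligned word or doubleword and merge the relevant bytes into rt using a
   shifted mask. */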
2111 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2112 int rt
, int base
, int16_t offset
)
2116 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2117 /* Loongson CPU uses a load to zero register for prefetch.
2118 We emulate it as a NOP. On other CPU we must perform the
2119 actual memory access. */
2123 t0
= tcg_temp_new();
2124 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2127 #if defined(TARGET_MIPS64)
2129 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2130 ctx
->default_tcg_memop_mask
);
2131 gen_store_gpr(t0
, rt
);
2134 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2135 ctx
->default_tcg_memop_mask
);
2136 gen_store_gpr(t0
, rt
);
2140 op_ld_lld(t0
, t0
, ctx
);
2141 gen_store_gpr(t0
, rt
);
2144 t1
= tcg_temp_new();
2145 /* Do a byte access to possibly trigger a page
2146 fault with the unaligned address. */
2147 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2148 tcg_gen_andi_tl(t1
, t0
, 7);
2149 #ifndef TARGET_WORDS_BIGENDIAN
2150 tcg_gen_xori_tl(t1
, t1
, 7);
2152 tcg_gen_shli_tl(t1
, t1
, 3);
2153 tcg_gen_andi_tl(t0
, t0
, ~7);
2154 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2155 tcg_gen_shl_tl(t0
, t0
, t1
);
2156 t2
= tcg_const_tl(-1);
2157 tcg_gen_shl_tl(t2
, t2
, t1
);
2158 gen_load_gpr(t1
, rt
);
2159 tcg_gen_andc_tl(t1
, t1
, t2
);
2161 tcg_gen_or_tl(t0
, t0
, t1
);
2163 gen_store_gpr(t0
, rt
);
2166 t1
= tcg_temp_new();
2167 /* Do a byte access to possibly trigger a page
2168 fault with the unaligned address. */
2169 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2170 tcg_gen_andi_tl(t1
, t0
, 7);
2171 #ifdef TARGET_WORDS_BIGENDIAN
2172 tcg_gen_xori_tl(t1
, t1
, 7);
2174 tcg_gen_shli_tl(t1
, t1
, 3);
2175 tcg_gen_andi_tl(t0
, t0
, ~7);
2176 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2177 tcg_gen_shr_tl(t0
, t0
, t1
);
2178 tcg_gen_xori_tl(t1
, t1
, 63);
2179 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2180 tcg_gen_shl_tl(t2
, t2
, t1
);
2181 gen_load_gpr(t1
, rt
);
2182 tcg_gen_and_tl(t1
, t1
, t2
);
2184 tcg_gen_or_tl(t0
, t0
, t1
);
2186 gen_store_gpr(t0
, rt
);
2189 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2190 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2192 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2193 gen_store_gpr(t0
, rt
);
2197 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2198 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2200 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2201 gen_store_gpr(t0
, rt
);
2204 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2205 ctx
->default_tcg_memop_mask
);
2206 gen_store_gpr(t0
, rt
);
2209 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2210 ctx
->default_tcg_memop_mask
);
2211 gen_store_gpr(t0
, rt
);
2214 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2215 ctx
->default_tcg_memop_mask
);
2216 gen_store_gpr(t0
, rt
);
2219 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2220 gen_store_gpr(t0
, rt
);
2223 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2224 gen_store_gpr(t0
, rt
);
2227 t1
= tcg_temp_new();
2228 /* Do a byte access to possibly trigger a page
2229 fault with the unaligned address. */
2230 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2231 tcg_gen_andi_tl(t1
, t0
, 3);
2232 #ifndef TARGET_WORDS_BIGENDIAN
2233 tcg_gen_xori_tl(t1
, t1
, 3);
2235 tcg_gen_shli_tl(t1
, t1
, 3);
2236 tcg_gen_andi_tl(t0
, t0
, ~3);
2237 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2238 tcg_gen_shl_tl(t0
, t0
, t1
);
2239 t2
= tcg_const_tl(-1);
2240 tcg_gen_shl_tl(t2
, t2
, t1
);
2241 gen_load_gpr(t1
, rt
);
2242 tcg_gen_andc_tl(t1
, t1
, t2
);
2244 tcg_gen_or_tl(t0
, t0
, t1
);
2246 tcg_gen_ext32s_tl(t0
, t0
);
2247 gen_store_gpr(t0
, rt
);
2250 t1
= tcg_temp_new();
2251 /* Do a byte access to possibly trigger a page
2252 fault with the unaligned address. */
2253 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2254 tcg_gen_andi_tl(t1
, t0
, 3);
2255 #ifdef TARGET_WORDS_BIGENDIAN
2256 tcg_gen_xori_tl(t1
, t1
, 3);
2258 tcg_gen_shli_tl(t1
, t1
, 3);
2259 tcg_gen_andi_tl(t0
, t0
, ~3);
2260 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2261 tcg_gen_shr_tl(t0
, t0
, t1
);
2262 tcg_gen_xori_tl(t1
, t1
, 31);
2263 t2
= tcg_const_tl(0xfffffffeull
);
2264 tcg_gen_shl_tl(t2
, t2
, t1
);
2265 gen_load_gpr(t1
, rt
);
2266 tcg_gen_and_tl(t1
, t1
, t2
);
2268 tcg_gen_or_tl(t0
, t0
, t1
);
2270 tcg_gen_ext32s_tl(t0
, t0
);
2271 gen_store_gpr(t0
, rt
);
2275 op_ld_ll(t0
, t0
, ctx
);
2276 gen_store_gpr(t0
, rt
);
2283 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2284 int base
, int16_t offset
)
2286 TCGv t0
= tcg_temp_new();
2287 TCGv t1
= tcg_temp_new();
2289 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2290 gen_load_gpr(t1
, rt
);
2292 #if defined(TARGET_MIPS64)
2294 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2295 ctx
->default_tcg_memop_mask
);
2298 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2301 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2305 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2306 ctx
->default_tcg_memop_mask
);
2309 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2310 ctx
->default_tcg_memop_mask
);
2313 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2316 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2319 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2327 /* Store conditional */
2328 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2329 int base
, int16_t offset
)
2333 #ifdef CONFIG_USER_ONLY
2334 t0
= tcg_temp_local_new();
2335 t1
= tcg_temp_local_new();
2337 t0
= tcg_temp_new();
2338 t1
= tcg_temp_new();
2340 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2341 gen_load_gpr(t1
, rt
);
2343 #if defined(TARGET_MIPS64)
2346 op_st_scd(t1
, t0
, rt
, ctx
);
2351 op_st_sc(t1
, t0
, rt
, ctx
);
2358 /* Load and store */
2359 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2360 int base
, int16_t offset
)
2362 TCGv t0
= tcg_temp_new();
2364 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2365 /* Don't do NOP if destination is zero: we must perform the actual
2370 TCGv_i32 fp0
= tcg_temp_new_i32();
2371 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2372 ctx
->default_tcg_memop_mask
);
2373 gen_store_fpr32(ctx
, fp0
, ft
);
2374 tcg_temp_free_i32(fp0
);
2379 TCGv_i32 fp0
= tcg_temp_new_i32();
2380 gen_load_fpr32(ctx
, fp0
, ft
);
2381 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2382 ctx
->default_tcg_memop_mask
);
2383 tcg_temp_free_i32(fp0
);
2388 TCGv_i64 fp0
= tcg_temp_new_i64();
2389 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2390 ctx
->default_tcg_memop_mask
);
2391 gen_store_fpr64(ctx
, fp0
, ft
);
2392 tcg_temp_free_i64(fp0
);
2397 TCGv_i64 fp0
= tcg_temp_new_i64();
2398 gen_load_fpr64(ctx
, fp0
, ft
);
2399 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2400 ctx
->default_tcg_memop_mask
);
2401 tcg_temp_free_i64(fp0
);
2405 MIPS_INVAL("flt_ldst");
2406 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        switch (op) {
        case OPC_LDC1:
        case OPC_SDC1:
            check_insn(ctx, ISA_MIPS2);
            /* Fallthrough */
        default:
            gen_flt_ldst(ctx, op, rt, rs, imm);
        }
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
2431 /* Arithmetic with immediate operand */
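/* Signed-overflow detection for ADDI/DADDI below uses the classic XOR
   trick: overflow occurred iff both operands have the same sign and the
   result's sign differs, i.e. ((rs ^ ~imm) & (result ^ imm)) is negative. */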
2432 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2433 int rt
, int rs
, int16_t imm
)
2435 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2437 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2438 /* If no destination, treat it as a NOP.
2439 For addi, we must generate the overflow exception when needed. */
2445 TCGv t0
= tcg_temp_local_new();
2446 TCGv t1
= tcg_temp_new();
2447 TCGv t2
= tcg_temp_new();
2448 TCGLabel
*l1
= gen_new_label();
2450 gen_load_gpr(t1
, rs
);
2451 tcg_gen_addi_tl(t0
, t1
, uimm
);
2452 tcg_gen_ext32s_tl(t0
, t0
);
2454 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2455 tcg_gen_xori_tl(t2
, t0
, uimm
);
2456 tcg_gen_and_tl(t1
, t1
, t2
);
2458 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2460 /* operands of same sign, result different sign */
2461 generate_exception(ctx
, EXCP_OVERFLOW
);
2463 tcg_gen_ext32s_tl(t0
, t0
);
2464 gen_store_gpr(t0
, rt
);
2470 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2471 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2473 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2476 #if defined(TARGET_MIPS64)
2479 TCGv t0
= tcg_temp_local_new();
2480 TCGv t1
= tcg_temp_new();
2481 TCGv t2
= tcg_temp_new();
2482 TCGLabel
*l1
= gen_new_label();
2484 gen_load_gpr(t1
, rs
);
2485 tcg_gen_addi_tl(t0
, t1
, uimm
);
2487 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2488 tcg_gen_xori_tl(t2
, t0
, uimm
);
2489 tcg_gen_and_tl(t1
, t1
, t2
);
2491 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2493 /* operands of same sign, result different sign */
2494 generate_exception(ctx
, EXCP_OVERFLOW
);
2496 gen_store_gpr(t0
, rt
);
2502 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2504 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2511 /* Logic with immediate operand */
2512 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2513 int rt
, int rs
, int16_t imm
)
2518 /* If no destination, treat it as a NOP. */
2521 uimm
= (uint16_t)imm
;
2524 if (likely(rs
!= 0))
2525 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2527 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2531 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2533 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2536 if (likely(rs
!= 0))
2537 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2539 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2542 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2544 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2545 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2547 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    tcg_temp_free(t0);
}
2580 /* Shifts with immediate operand */
2581 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2582 int rt
, int rs
, int16_t imm
)
2584 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2588 /* If no destination, treat it as a NOP. */
2592 t0
= tcg_temp_new();
2593 gen_load_gpr(t0
, rs
);
2596 tcg_gen_shli_tl(t0
, t0
, uimm
);
2597 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2600 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2604 tcg_gen_ext32u_tl(t0
, t0
);
2605 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2607 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2612 TCGv_i32 t1
= tcg_temp_new_i32();
2614 tcg_gen_trunc_tl_i32(t1
, t0
);
2615 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2616 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2617 tcg_temp_free_i32(t1
);
2619 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2622 #if defined(TARGET_MIPS64)
2624 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2627 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2630 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2634 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2636 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2640 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2643 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2646 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2649 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2657 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2658 int rd
, int rs
, int rt
)
2660 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2661 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2662 /* If no destination, treat it as a NOP.
2663 For add & sub, we must generate the overflow exception when needed. */
2670 TCGv t0
= tcg_temp_local_new();
2671 TCGv t1
= tcg_temp_new();
2672 TCGv t2
= tcg_temp_new();
2673 TCGLabel
*l1
= gen_new_label();
2675 gen_load_gpr(t1
, rs
);
2676 gen_load_gpr(t2
, rt
);
2677 tcg_gen_add_tl(t0
, t1
, t2
);
2678 tcg_gen_ext32s_tl(t0
, t0
);
2679 tcg_gen_xor_tl(t1
, t1
, t2
);
2680 tcg_gen_xor_tl(t2
, t0
, t2
);
2681 tcg_gen_andc_tl(t1
, t2
, t1
);
2683 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2685 /* operands of same sign, result different sign */
2686 generate_exception(ctx
, EXCP_OVERFLOW
);
2688 gen_store_gpr(t0
, rd
);
2693 if (rs
!= 0 && rt
!= 0) {
2694 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2695 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2696 } else if (rs
== 0 && rt
!= 0) {
2697 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2698 } else if (rs
!= 0 && rt
== 0) {
2699 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2701 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2706 TCGv t0
= tcg_temp_local_new();
2707 TCGv t1
= tcg_temp_new();
2708 TCGv t2
= tcg_temp_new();
2709 TCGLabel
*l1
= gen_new_label();
2711 gen_load_gpr(t1
, rs
);
2712 gen_load_gpr(t2
, rt
);
2713 tcg_gen_sub_tl(t0
, t1
, t2
);
2714 tcg_gen_ext32s_tl(t0
, t0
);
2715 tcg_gen_xor_tl(t2
, t1
, t2
);
2716 tcg_gen_xor_tl(t1
, t0
, t1
);
2717 tcg_gen_and_tl(t1
, t1
, t2
);
2719 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2721 /* operands of different sign, first operand and result different sign */
2722 generate_exception(ctx
, EXCP_OVERFLOW
);
2724 gen_store_gpr(t0
, rd
);
2729 if (rs
!= 0 && rt
!= 0) {
2730 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2731 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2732 } else if (rs
== 0 && rt
!= 0) {
2733 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2734 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2735 } else if (rs
!= 0 && rt
== 0) {
2736 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2738 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2741 #if defined(TARGET_MIPS64)
2744 TCGv t0
= tcg_temp_local_new();
2745 TCGv t1
= tcg_temp_new();
2746 TCGv t2
= tcg_temp_new();
2747 TCGLabel
*l1
= gen_new_label();
2749 gen_load_gpr(t1
, rs
);
2750 gen_load_gpr(t2
, rt
);
2751 tcg_gen_add_tl(t0
, t1
, t2
);
2752 tcg_gen_xor_tl(t1
, t1
, t2
);
2753 tcg_gen_xor_tl(t2
, t0
, t2
);
2754 tcg_gen_andc_tl(t1
, t2
, t1
);
2756 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2758 /* operands of same sign, result different sign */
2759 generate_exception(ctx
, EXCP_OVERFLOW
);
2761 gen_store_gpr(t0
, rd
);
2766 if (rs
!= 0 && rt
!= 0) {
2767 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2768 } else if (rs
== 0 && rt
!= 0) {
2769 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2770 } else if (rs
!= 0 && rt
== 0) {
2771 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2773 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2778 TCGv t0
= tcg_temp_local_new();
2779 TCGv t1
= tcg_temp_new();
2780 TCGv t2
= tcg_temp_new();
2781 TCGLabel
*l1
= gen_new_label();
2783 gen_load_gpr(t1
, rs
);
2784 gen_load_gpr(t2
, rt
);
2785 tcg_gen_sub_tl(t0
, t1
, t2
);
2786 tcg_gen_xor_tl(t2
, t1
, t2
);
2787 tcg_gen_xor_tl(t1
, t0
, t1
);
2788 tcg_gen_and_tl(t1
, t1
, t2
);
2790 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2792 /* operands of different sign, first operand and result different sign */
2793 generate_exception(ctx
, EXCP_OVERFLOW
);
2795 gen_store_gpr(t0
, rd
);
2800 if (rs
!= 0 && rt
!= 0) {
2801 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2802 } else if (rs
== 0 && rt
!= 0) {
2803 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2804 } else if (rs
!= 0 && rt
== 0) {
2805 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2807 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2812 if (likely(rs
!= 0 && rt
!= 0)) {
2813 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2816 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2822 /* Conditional move */
2823 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2824 int rd
, int rs
, int rt
)
2829 /* If no destination, treat it as a NOP. */
2833 t0
= tcg_temp_new();
2834 gen_load_gpr(t0
, rt
);
2835 t1
= tcg_const_tl(0);
2836 t2
= tcg_temp_new();
2837 gen_load_gpr(t2
, rs
);
2840 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2843 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2846 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2849 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2858 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2859 int rd
, int rs
, int rt
)
2862 /* If no destination, treat it as a NOP. */
2868 if (likely(rs
!= 0 && rt
!= 0)) {
2869 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2871 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2875 if (rs
!= 0 && rt
!= 0) {
2876 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2877 } else if (rs
== 0 && rt
!= 0) {
2878 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2879 } else if (rs
!= 0 && rt
== 0) {
2880 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2882 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2886 if (likely(rs
!= 0 && rt
!= 0)) {
2887 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2888 } else if (rs
== 0 && rt
!= 0) {
2889 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2890 } else if (rs
!= 0 && rt
== 0) {
2891 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2893 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2897 if (likely(rs
!= 0 && rt
!= 0)) {
2898 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2899 } else if (rs
== 0 && rt
!= 0) {
2900 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2901 } else if (rs
!= 0 && rt
== 0) {
2902 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2904 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
2938 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2939 int rd
, int rs
, int rt
)
2944 /* If no destination, treat it as a NOP.
2945 For add & sub, we must generate the overflow exception when needed. */
2949 t0
= tcg_temp_new();
2950 t1
= tcg_temp_new();
2951 gen_load_gpr(t0
, rs
);
2952 gen_load_gpr(t1
, rt
);
2955 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2956 tcg_gen_shl_tl(t0
, t1
, t0
);
2957 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2960 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2961 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2964 tcg_gen_ext32u_tl(t1
, t1
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2966 tcg_gen_shr_tl(t0
, t1
, t0
);
2967 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2971 TCGv_i32 t2
= tcg_temp_new_i32();
2972 TCGv_i32 t3
= tcg_temp_new_i32();
2974 tcg_gen_trunc_tl_i32(t2
, t0
);
2975 tcg_gen_trunc_tl_i32(t3
, t1
);
2976 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2977 tcg_gen_rotr_i32(t2
, t3
, t2
);
2978 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2979 tcg_temp_free_i32(t2
);
2980 tcg_temp_free_i32(t3
);
2983 #if defined(TARGET_MIPS64)
2985 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2986 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2989 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2990 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2993 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2994 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2997 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2998 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3006 /* Arithmetic on HI/LO registers */
3007 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3009 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3020 #if defined(TARGET_MIPS64)
3022 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3026 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3030 #if defined(TARGET_MIPS64)
3032 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3036 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3041 #if defined(TARGET_MIPS64)
3043 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3047 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3050 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3055 #if defined(TARGET_MIPS64)
3057 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3061 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3064 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
static inline void gen_r6_ld(target_long addr, int reg, int memidx,
                             TCGMemOp memop)
{
    TCGv t0 = tcg_const_tl(addr);
    tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
    gen_store_gpr(t0, reg);
    tcg_temp_free(t0);
}
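/* R6 PC-relative family (ADDIUPC, LWPC, AUIPC, ALUIPC and, on MIPS64,
   LWUPC/LDPC): the immediate is scaled and sign-extended, added to the
   current PC at translation time via addr_add(), and either written to
   rs directly or used as the address of a gen_r6_ld() load. */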
3079 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3085 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3088 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3089 addr
= addr_add(ctx
, pc
, offset
);
3090 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3094 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3095 addr
= addr_add(ctx
, pc
, offset
);
3096 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3098 #if defined(TARGET_MIPS64)
3101 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3102 addr
= addr_add(ctx
, pc
, offset
);
3103 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3107 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3110 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3111 addr
= addr_add(ctx
, pc
, offset
);
3112 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3117 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3118 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3119 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3122 #if defined(TARGET_MIPS64)
3123 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3124 case R6_OPC_LDPC
+ (1 << 16):
3125 case R6_OPC_LDPC
+ (2 << 16):
3126 case R6_OPC_LDPC
+ (3 << 16):
3128 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3129 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3130 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3134 MIPS_INVAL("OPC_PCREL");
3135 generate_exception_end(ctx
, EXCP_RI
);
3142 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3151 t0
= tcg_temp_new();
3152 t1
= tcg_temp_new();
3154 gen_load_gpr(t0
, rs
);
3155 gen_load_gpr(t1
, rt
);
3160 TCGv t2
= tcg_temp_new();
3161 TCGv t3
= tcg_temp_new();
3162 tcg_gen_ext32s_tl(t0
, t0
);
3163 tcg_gen_ext32s_tl(t1
, t1
);
3164 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3165 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3166 tcg_gen_and_tl(t2
, t2
, t3
);
3167 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3168 tcg_gen_or_tl(t2
, t2
, t3
);
3169 tcg_gen_movi_tl(t3
, 0);
3170 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3171 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3172 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3179 TCGv t2
= tcg_temp_new();
3180 TCGv t3
= tcg_temp_new();
3181 tcg_gen_ext32s_tl(t0
, t0
);
3182 tcg_gen_ext32s_tl(t1
, t1
);
3183 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3184 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3185 tcg_gen_and_tl(t2
, t2
, t3
);
3186 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3187 tcg_gen_or_tl(t2
, t2
, t3
);
3188 tcg_gen_movi_tl(t3
, 0);
3189 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3190 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3191 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3198 TCGv t2
= tcg_const_tl(0);
3199 TCGv t3
= tcg_const_tl(1);
3200 tcg_gen_ext32u_tl(t0
, t0
);
3201 tcg_gen_ext32u_tl(t1
, t1
);
3202 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3203 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3204 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3211 TCGv t2
= tcg_const_tl(0);
3212 TCGv t3
= tcg_const_tl(1);
3213 tcg_gen_ext32u_tl(t0
, t0
);
3214 tcg_gen_ext32u_tl(t1
, t1
);
3215 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3216 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3217 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3224 TCGv_i32 t2
= tcg_temp_new_i32();
3225 TCGv_i32 t3
= tcg_temp_new_i32();
3226 tcg_gen_trunc_tl_i32(t2
, t0
);
3227 tcg_gen_trunc_tl_i32(t3
, t1
);
3228 tcg_gen_mul_i32(t2
, t2
, t3
);
3229 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3230 tcg_temp_free_i32(t2
);
3231 tcg_temp_free_i32(t3
);
3236 TCGv_i32 t2
= tcg_temp_new_i32();
3237 TCGv_i32 t3
= tcg_temp_new_i32();
3238 tcg_gen_trunc_tl_i32(t2
, t0
);
3239 tcg_gen_trunc_tl_i32(t3
, t1
);
3240 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3241 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3242 tcg_temp_free_i32(t2
);
3243 tcg_temp_free_i32(t3
);
3248 TCGv_i32 t2
= tcg_temp_new_i32();
3249 TCGv_i32 t3
= tcg_temp_new_i32();
3250 tcg_gen_trunc_tl_i32(t2
, t0
);
3251 tcg_gen_trunc_tl_i32(t3
, t1
);
3252 tcg_gen_mul_i32(t2
, t2
, t3
);
3253 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3254 tcg_temp_free_i32(t2
);
3255 tcg_temp_free_i32(t3
);
3260 TCGv_i32 t2
= tcg_temp_new_i32();
3261 TCGv_i32 t3
= tcg_temp_new_i32();
3262 tcg_gen_trunc_tl_i32(t2
, t0
);
3263 tcg_gen_trunc_tl_i32(t3
, t1
);
3264 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3265 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3266 tcg_temp_free_i32(t2
);
3267 tcg_temp_free_i32(t3
);
3270 #if defined(TARGET_MIPS64)
3273 TCGv t2
= tcg_temp_new();
3274 TCGv t3
= tcg_temp_new();
3275 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3276 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3277 tcg_gen_and_tl(t2
, t2
, t3
);
3278 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3279 tcg_gen_or_tl(t2
, t2
, t3
);
3280 tcg_gen_movi_tl(t3
, 0);
3281 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3282 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3289 TCGv t2
= tcg_temp_new();
3290 TCGv t3
= tcg_temp_new();
3291 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3292 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3293 tcg_gen_and_tl(t2
, t2
, t3
);
3294 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3295 tcg_gen_or_tl(t2
, t2
, t3
);
3296 tcg_gen_movi_tl(t3
, 0);
3297 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3298 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3305 TCGv t2
= tcg_const_tl(0);
3306 TCGv t3
= tcg_const_tl(1);
3307 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3308 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3315 TCGv t2
= tcg_const_tl(0);
3316 TCGv t3
= tcg_const_tl(1);
3317 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3318 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3324 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3328 TCGv t2
= tcg_temp_new();
3329 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3334 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3338 TCGv t2
= tcg_temp_new();
3339 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3345 MIPS_INVAL("r6 mul/div");
3346 generate_exception_end(ctx
, EXCP_RI
);
3354 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3355 int acc
, int rs
, int rt
)
3359 t0
= tcg_temp_new();
3360 t1
= tcg_temp_new();
3362 gen_load_gpr(t0
, rs
);
3363 gen_load_gpr(t1
, rt
);
3372 TCGv t2
= tcg_temp_new();
3373 TCGv t3
= tcg_temp_new();
3374 tcg_gen_ext32s_tl(t0
, t0
);
3375 tcg_gen_ext32s_tl(t1
, t1
);
3376 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3377 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3378 tcg_gen_and_tl(t2
, t2
, t3
);
3379 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3380 tcg_gen_or_tl(t2
, t2
, t3
);
3381 tcg_gen_movi_tl(t3
, 0);
3382 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3383 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3384 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3385 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3386 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3393 TCGv t2
= tcg_const_tl(0);
3394 TCGv t3
= tcg_const_tl(1);
3395 tcg_gen_ext32u_tl(t0
, t0
);
3396 tcg_gen_ext32u_tl(t1
, t1
);
3397 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3398 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3399 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3400 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3401 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3408 TCGv_i32 t2
= tcg_temp_new_i32();
3409 TCGv_i32 t3
= tcg_temp_new_i32();
3410 tcg_gen_trunc_tl_i32(t2
, t0
);
3411 tcg_gen_trunc_tl_i32(t3
, t1
);
3412 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3413 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3414 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3415 tcg_temp_free_i32(t2
);
3416 tcg_temp_free_i32(t3
);
3421 TCGv_i32 t2
= tcg_temp_new_i32();
3422 TCGv_i32 t3
= tcg_temp_new_i32();
3423 tcg_gen_trunc_tl_i32(t2
, t0
);
3424 tcg_gen_trunc_tl_i32(t3
, t1
);
3425 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3426 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3427 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3428 tcg_temp_free_i32(t2
);
3429 tcg_temp_free_i32(t3
);
3432 #if defined(TARGET_MIPS64)
3435 TCGv t2
= tcg_temp_new();
3436 TCGv t3
= tcg_temp_new();
3437 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3438 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3439 tcg_gen_and_tl(t2
, t2
, t3
);
3440 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3441 tcg_gen_or_tl(t2
, t2
, t3
);
3442 tcg_gen_movi_tl(t3
, 0);
3443 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3444 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3445 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3452 TCGv t2
= tcg_const_tl(0);
3453 TCGv t3
= tcg_const_tl(1);
3454 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3455 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3456 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3462 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3465 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3470 TCGv_i64 t2
= tcg_temp_new_i64();
3471 TCGv_i64 t3
= tcg_temp_new_i64();
3473 tcg_gen_ext_tl_i64(t2
, t0
);
3474 tcg_gen_ext_tl_i64(t3
, t1
);
3475 tcg_gen_mul_i64(t2
, t2
, t3
);
3476 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3477 tcg_gen_add_i64(t2
, t2
, t3
);
3478 tcg_temp_free_i64(t3
);
3479 gen_move_low32(cpu_LO
[acc
], t2
);
3480 gen_move_high32(cpu_HI
[acc
], t2
);
3481 tcg_temp_free_i64(t2
);
3486 TCGv_i64 t2
= tcg_temp_new_i64();
3487 TCGv_i64 t3
= tcg_temp_new_i64();
3489 tcg_gen_ext32u_tl(t0
, t0
);
3490 tcg_gen_ext32u_tl(t1
, t1
);
3491 tcg_gen_extu_tl_i64(t2
, t0
);
3492 tcg_gen_extu_tl_i64(t3
, t1
);
3493 tcg_gen_mul_i64(t2
, t2
, t3
);
3494 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3495 tcg_gen_add_i64(t2
, t2
, t3
);
3496 tcg_temp_free_i64(t3
);
3497 gen_move_low32(cpu_LO
[acc
], t2
);
3498 gen_move_high32(cpu_HI
[acc
], t2
);
3499 tcg_temp_free_i64(t2
);
3504 TCGv_i64 t2
= tcg_temp_new_i64();
3505 TCGv_i64 t3
= tcg_temp_new_i64();
3507 tcg_gen_ext_tl_i64(t2
, t0
);
3508 tcg_gen_ext_tl_i64(t3
, t1
);
3509 tcg_gen_mul_i64(t2
, t2
, t3
);
3510 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3511 tcg_gen_sub_i64(t2
, t3
, t2
);
3512 tcg_temp_free_i64(t3
);
3513 gen_move_low32(cpu_LO
[acc
], t2
);
3514 gen_move_high32(cpu_HI
[acc
], t2
);
3515 tcg_temp_free_i64(t2
);
3520 TCGv_i64 t2
= tcg_temp_new_i64();
3521 TCGv_i64 t3
= tcg_temp_new_i64();
3523 tcg_gen_ext32u_tl(t0
, t0
);
3524 tcg_gen_ext32u_tl(t1
, t1
);
3525 tcg_gen_extu_tl_i64(t2
, t0
);
3526 tcg_gen_extu_tl_i64(t3
, t1
);
3527 tcg_gen_mul_i64(t2
, t2
, t3
);
3528 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3529 tcg_gen_sub_i64(t2
, t3
, t2
);
3530 tcg_temp_free_i64(t3
);
3531 gen_move_low32(cpu_LO
[acc
], t2
);
3532 gen_move_high32(cpu_HI
[acc
], t2
);
3533 tcg_temp_free_i64(t2
);
3537 MIPS_INVAL("mul/div");
3538 generate_exception_end(ctx
, EXCP_RI
);
3546 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3547 int rd
, int rs
, int rt
)
3549 TCGv t0
= tcg_temp_new();
3550 TCGv t1
= tcg_temp_new();
3552 gen_load_gpr(t0
, rs
);
3553 gen_load_gpr(t1
, rt
);
3556 case OPC_VR54XX_MULS
:
3557 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3559 case OPC_VR54XX_MULSU
:
3560 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3562 case OPC_VR54XX_MACC
:
3563 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3565 case OPC_VR54XX_MACCU
:
3566 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3568 case OPC_VR54XX_MSAC
:
3569 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3571 case OPC_VR54XX_MSACU
:
3572 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3574 case OPC_VR54XX_MULHI
:
3575 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3577 case OPC_VR54XX_MULHIU
:
3578 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3580 case OPC_VR54XX_MULSHI
:
3581 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3583 case OPC_VR54XX_MULSHIU
:
3584 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3586 case OPC_VR54XX_MACCHI
:
3587 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3589 case OPC_VR54XX_MACCHIU
:
3590 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3592 case OPC_VR54XX_MSACHI
:
3593 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3595 case OPC_VR54XX_MSACHIU
:
3596 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3599 MIPS_INVAL("mul vr54xx");
3600 generate_exception_end(ctx
, EXCP_RI
);
3603 gen_store_gpr(t0
, rd
);
3610 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3619 t0
= tcg_temp_new();
3620 gen_load_gpr(t0
, rs
);
3624 gen_helper_clo(cpu_gpr
[rd
], t0
);
3628 gen_helper_clz(cpu_gpr
[rd
], t0
);
3630 #if defined(TARGET_MIPS64)
3633 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3637 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3644 /* Godson integer instructions */
3645 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3646 int rd
, int rs
, int rt
)
3658 case OPC_MULTU_G_2E
:
3659 case OPC_MULTU_G_2F
:
3660 #if defined(TARGET_MIPS64)
3661 case OPC_DMULT_G_2E
:
3662 case OPC_DMULT_G_2F
:
3663 case OPC_DMULTU_G_2E
:
3664 case OPC_DMULTU_G_2F
:
3666 t0
= tcg_temp_new();
3667 t1
= tcg_temp_new();
3670 t0
= tcg_temp_local_new();
3671 t1
= tcg_temp_local_new();
3675 gen_load_gpr(t0
, rs
);
3676 gen_load_gpr(t1
, rt
);
3681 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3682 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3684 case OPC_MULTU_G_2E
:
3685 case OPC_MULTU_G_2F
:
3686 tcg_gen_ext32u_tl(t0
, t0
);
3687 tcg_gen_ext32u_tl(t1
, t1
);
3688 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3689 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3694 TCGLabel
*l1
= gen_new_label();
3695 TCGLabel
*l2
= gen_new_label();
3696 TCGLabel
*l3
= gen_new_label();
3697 tcg_gen_ext32s_tl(t0
, t0
);
3698 tcg_gen_ext32s_tl(t1
, t1
);
3699 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3700 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3703 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3704 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3705 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3708 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3709 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3716 TCGLabel
*l1
= gen_new_label();
3717 TCGLabel
*l2
= gen_new_label();
3718 tcg_gen_ext32u_tl(t0
, t0
);
3719 tcg_gen_ext32u_tl(t1
, t1
);
3720 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3721 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3724 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3725 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3732 TCGLabel
*l1
= gen_new_label();
3733 TCGLabel
*l2
= gen_new_label();
3734 TCGLabel
*l3
= gen_new_label();
3735 tcg_gen_ext32u_tl(t0
, t0
);
3736 tcg_gen_ext32u_tl(t1
, t1
);
3737 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3738 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3739 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3741 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3744 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3745 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3752 TCGLabel
*l1
= gen_new_label();
3753 TCGLabel
*l2
= gen_new_label();
3754 tcg_gen_ext32u_tl(t0
, t0
);
3755 tcg_gen_ext32u_tl(t1
, t1
);
3756 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3757 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3760 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3761 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3765 #if defined(TARGET_MIPS64)
3766 case OPC_DMULT_G_2E
:
3767 case OPC_DMULT_G_2F
:
3768 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3770 case OPC_DMULTU_G_2E
:
3771 case OPC_DMULTU_G_2F
:
3772 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3777 TCGLabel
*l1
= gen_new_label();
3778 TCGLabel
*l2
= gen_new_label();
3779 TCGLabel
*l3
= gen_new_label();
3780 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3781 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3784 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3785 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3786 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3789 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3793 case OPC_DDIVU_G_2E
:
3794 case OPC_DDIVU_G_2F
:
3796 TCGLabel
*l1
= gen_new_label();
3797 TCGLabel
*l2
= gen_new_label();
3798 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3799 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3802 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3809 TCGLabel
*l1
= gen_new_label();
3810 TCGLabel
*l2
= gen_new_label();
3811 TCGLabel
*l3
= gen_new_label();
3812 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3813 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3814 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3816 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3819 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3823 case OPC_DMODU_G_2E
:
3824 case OPC_DMODU_G_2F
:
3826 TCGLabel
*l1
= gen_new_label();
3827 TCGLabel
*l2
= gen_new_label();
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3829 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3832 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3843 /* Loongson multimedia instructions */
3844 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3846 uint32_t opc
, shift_max
;
3849 opc
= MASK_LMI(ctx
->opcode
);
3855 t0
= tcg_temp_local_new_i64();
3856 t1
= tcg_temp_local_new_i64();
3859 t0
= tcg_temp_new_i64();
3860 t1
= tcg_temp_new_i64();
3864 gen_load_fpr64(ctx
, t0
, rs
);
3865 gen_load_fpr64(ctx
, t1
, rt
);
3867 #define LMI_HELPER(UP, LO) \
3868 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3869 #define LMI_HELPER_1(UP, LO) \
3870 case OPC_##UP: gen_helper_##LO(t0, t0); break
3871 #define LMI_DIRECT(UP, LO, OP) \
3872 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3875 LMI_HELPER(PADDSH
, paddsh
);
3876 LMI_HELPER(PADDUSH
, paddush
);
3877 LMI_HELPER(PADDH
, paddh
);
3878 LMI_HELPER(PADDW
, paddw
);
3879 LMI_HELPER(PADDSB
, paddsb
);
3880 LMI_HELPER(PADDUSB
, paddusb
);
3881 LMI_HELPER(PADDB
, paddb
);
3883 LMI_HELPER(PSUBSH
, psubsh
);
3884 LMI_HELPER(PSUBUSH
, psubush
);
3885 LMI_HELPER(PSUBH
, psubh
);
3886 LMI_HELPER(PSUBW
, psubw
);
3887 LMI_HELPER(PSUBSB
, psubsb
);
3888 LMI_HELPER(PSUBUSB
, psubusb
);
3889 LMI_HELPER(PSUBB
, psubb
);
3891 LMI_HELPER(PSHUFH
, pshufh
);
3892 LMI_HELPER(PACKSSWH
, packsswh
);
3893 LMI_HELPER(PACKSSHB
, packsshb
);
3894 LMI_HELPER(PACKUSHB
, packushb
);
3896 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3897 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3898 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3899 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3900 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3901 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3903 LMI_HELPER(PAVGH
, pavgh
);
3904 LMI_HELPER(PAVGB
, pavgb
);
3905 LMI_HELPER(PMAXSH
, pmaxsh
);
3906 LMI_HELPER(PMINSH
, pminsh
);
3907 LMI_HELPER(PMAXUB
, pmaxub
);
3908 LMI_HELPER(PMINUB
, pminub
);
3910 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3911 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3912 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3913 LMI_HELPER(PCMPGTH
, pcmpgth
);
3914 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3915 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3917 LMI_HELPER(PSLLW
, psllw
);
3918 LMI_HELPER(PSLLH
, psllh
);
3919 LMI_HELPER(PSRLW
, psrlw
);
3920 LMI_HELPER(PSRLH
, psrlh
);
3921 LMI_HELPER(PSRAW
, psraw
);
3922 LMI_HELPER(PSRAH
, psrah
);
3924 LMI_HELPER(PMULLH
, pmullh
);
3925 LMI_HELPER(PMULHH
, pmulhh
);
3926 LMI_HELPER(PMULHUH
, pmulhuh
);
3927 LMI_HELPER(PMADDHW
, pmaddhw
);
3929 LMI_HELPER(PASUBUB
, pasubub
);
3930 LMI_HELPER_1(BIADD
, biadd
);
3931 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3933 LMI_DIRECT(PADDD
, paddd
, add
);
3934 LMI_DIRECT(PSUBD
, psubd
, sub
);
3935 LMI_DIRECT(XOR_CP2
, xor, xor);
3936 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3937 LMI_DIRECT(AND_CP2
, and, and);
3938 LMI_DIRECT(PANDN
, pandn
, andc
);
3939 LMI_DIRECT(OR
, or, or);
3942 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3945 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3948 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3951 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3955 tcg_gen_andi_i64(t1
, t1
, 3);
3956 tcg_gen_shli_i64(t1
, t1
, 4);
3957 tcg_gen_shr_i64(t0
, t0
, t1
);
3958 tcg_gen_ext16u_i64(t0
, t0
);
3962 tcg_gen_add_i64(t0
, t0
, t1
);
3963 tcg_gen_ext32s_i64(t0
, t0
);
3966 tcg_gen_sub_i64(t0
, t0
, t1
);
3967 tcg_gen_ext32s_i64(t0
, t0
);
3989 /* Make sure shift count isn't TCG undefined behaviour. */
3990 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
3995 tcg_gen_shl_i64(t0
, t0
, t1
);
3999 /* Since SRA is UndefinedResult without sign-extended inputs,
4000 we can treat SRA and DSRA the same. */
4001 tcg_gen_sar_i64(t0
, t0
, t1
);
4004 /* We want to shift in zeros for SRL; zero-extend first. */
4005 tcg_gen_ext32u_i64(t0
, t0
);
4008 tcg_gen_shr_i64(t0
, t0
, t1
);
4012 if (shift_max
== 32) {
4013 tcg_gen_ext32s_i64(t0
, t0
);
4016 /* Shifts larger than MAX produce zero. */
4017 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4018 tcg_gen_neg_i64(t1
, t1
);
4019 tcg_gen_and_i64(t0
, t0
, t1
);
4025 TCGv_i64 t2
= tcg_temp_new_i64();
4026 TCGLabel
*lab
= gen_new_label();
4028 tcg_gen_mov_i64(t2
, t0
);
4029 tcg_gen_add_i64(t0
, t1
, t2
);
4030 if (opc
== OPC_ADD_CP2
) {
4031 tcg_gen_ext32s_i64(t0
, t0
);
4033 tcg_gen_xor_i64(t1
, t1
, t2
);
4034 tcg_gen_xor_i64(t2
, t2
, t0
);
4035 tcg_gen_andc_i64(t1
, t2
, t1
);
4036 tcg_temp_free_i64(t2
);
4037 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4038 generate_exception(ctx
, EXCP_OVERFLOW
);
4046 TCGv_i64 t2
= tcg_temp_new_i64();
4047 TCGLabel
*lab
= gen_new_label();
4049 tcg_gen_mov_i64(t2
, t0
);
4050 tcg_gen_sub_i64(t0
, t1
, t2
);
4051 if (opc
== OPC_SUB_CP2
) {
4052 tcg_gen_ext32s_i64(t0
, t0
);
4054 tcg_gen_xor_i64(t1
, t1
, t2
);
4055 tcg_gen_xor_i64(t2
, t2
, t0
);
4056 tcg_gen_and_i64(t1
, t1
, t2
);
4057 tcg_temp_free_i64(t2
);
4058 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4059 generate_exception(ctx
, EXCP_OVERFLOW
);
4065 tcg_gen_ext32u_i64(t0
, t0
);
4066 tcg_gen_ext32u_i64(t1
, t1
);
4067 tcg_gen_mul_i64(t0
, t0
, t1
);
4076 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4077 FD field is the CC field? */
4079 MIPS_INVAL("loongson_cp2");
4080 generate_exception_end(ctx
, EXCP_RI
);
4087 gen_store_fpr64(ctx
, t0
, rd
);
4089 tcg_temp_free_i64(t0
);
4090 tcg_temp_free_i64(t1
);
4094 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4095 int rs
, int rt
, int16_t imm
)
4098 TCGv t0
= tcg_temp_new();
4099 TCGv t1
= tcg_temp_new();
4102 /* Load needed operands */
4110 /* Compare two registers */
4112 gen_load_gpr(t0
, rs
);
4113 gen_load_gpr(t1
, rt
);
4123 /* Compare register to immediate */
4124 if (rs
!= 0 || imm
!= 0) {
4125 gen_load_gpr(t0
, rs
);
4126 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4133 case OPC_TEQ
: /* rs == rs */
4134 case OPC_TEQI
: /* r0 == 0 */
4135 case OPC_TGE
: /* rs >= rs */
4136 case OPC_TGEI
: /* r0 >= 0 */
4137 case OPC_TGEU
: /* rs >= rs unsigned */
4138 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4140 generate_exception_end(ctx
, EXCP_TRAP
);
4142 case OPC_TLT
: /* rs < rs */
4143 case OPC_TLTI
: /* r0 < 0 */
4144 case OPC_TLTU
: /* rs < rs unsigned */
4145 case OPC_TLTIU
: /* r0 < 0 unsigned */
4146 case OPC_TNE
: /* rs != rs */
4147 case OPC_TNEI
: /* r0 != 0 */
4148 /* Never trap: treat as NOP. */
4152 TCGLabel
*l1
= gen_new_label();
4157 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4161 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4165 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4169 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4173 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4177 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4180 generate_exception(ctx
, EXCP_TRAP
);
4187 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4189 TranslationBlock
*tb
;
4191 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4192 likely(!ctx
->singlestep_enabled
)) {
4195 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4198 if (ctx
->singlestep_enabled
) {
4199 save_cpu_state(ctx
, 0);
4200 gen_helper_raise_exception_debug(cpu_env
);
4206 /* Branches (before delay slot) */
4207 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4209 int rs
, int rt
, int32_t offset
,
4212 target_ulong btgt
= -1;
4214 int bcond_compute
= 0;
4215 TCGv t0
= tcg_temp_new();
4216 TCGv t1
= tcg_temp_new();
4218 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4219 #ifdef MIPS_DEBUG_DISAS
4220 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4221 TARGET_FMT_lx
"\n", ctx
->pc
);
4223 generate_exception_end(ctx
, EXCP_RI
);
4227 /* Load needed operands */
4233 /* Compare two registers */
4235 gen_load_gpr(t0
, rs
);
4236 gen_load_gpr(t1
, rt
);
4239 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4253 /* Compare to zero */
4255 gen_load_gpr(t0
, rs
);
4258 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4261 #if defined(TARGET_MIPS64)
4263 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4265 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4268 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4273 /* Jump to immediate */
4274 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4278 /* Jump to register */
4279 if (offset
!= 0 && offset
!= 16) {
4280 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4281 others are reserved. */
4282 MIPS_INVAL("jump hint");
4283 generate_exception_end(ctx
, EXCP_RI
);
4286 gen_load_gpr(btarget
, rs
);
4289 MIPS_INVAL("branch/jump");
4290 generate_exception_end(ctx
, EXCP_RI
);
4293 if (bcond_compute
== 0) {
4294 /* No condition to be computed */
4296 case OPC_BEQ
: /* rx == rx */
4297 case OPC_BEQL
: /* rx == rx likely */
4298 case OPC_BGEZ
: /* 0 >= 0 */
4299 case OPC_BGEZL
: /* 0 >= 0 likely */
4300 case OPC_BLEZ
: /* 0 <= 0 */
4301 case OPC_BLEZL
: /* 0 <= 0 likely */
4303 ctx
->hflags
|= MIPS_HFLAG_B
;
4305 case OPC_BGEZAL
: /* 0 >= 0 */
4306 case OPC_BGEZALL
: /* 0 >= 0 likely */
4307 /* Always take and link */
4309 ctx
->hflags
|= MIPS_HFLAG_B
;
4311 case OPC_BNE
: /* rx != rx */
4312 case OPC_BGTZ
: /* 0 > 0 */
4313 case OPC_BLTZ
: /* 0 < 0 */
4316 case OPC_BLTZAL
: /* 0 < 0 */
4317 /* Handle as an unconditional branch to get correct delay
4320 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4321 ctx
->hflags
|= MIPS_HFLAG_B
;
4323 case OPC_BLTZALL
: /* 0 < 0 likely */
4324 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4325 /* Skip the instruction in the delay slot */
4328 case OPC_BNEL
: /* rx != rx likely */
4329 case OPC_BGTZL
: /* 0 > 0 likely */
4330 case OPC_BLTZL
: /* 0 < 0 likely */
4331 /* Skip the instruction in the delay slot */
4335 ctx
->hflags
|= MIPS_HFLAG_B
;
4338 ctx
->hflags
|= MIPS_HFLAG_BX
;
4342 ctx
->hflags
|= MIPS_HFLAG_B
;
4345 ctx
->hflags
|= MIPS_HFLAG_BR
;
4349 ctx
->hflags
|= MIPS_HFLAG_BR
;
4352 MIPS_INVAL("branch/jump");
4353 generate_exception_end(ctx
, EXCP_RI
);
4359 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4362 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4365 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4368 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4371 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4374 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4377 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4381 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4385 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4388 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4391 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4394 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4397 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4400 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4403 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4405 #if defined(TARGET_MIPS64)
4407 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4411 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4414 ctx
->hflags
|= MIPS_HFLAG_BC
;
4417 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4420 ctx
->hflags
|= MIPS_HFLAG_BL
;
4423 MIPS_INVAL("conditional branch/jump");
4424 generate_exception_end(ctx
, EXCP_RI
);
4429 ctx
->btarget
= btgt
;
4431 switch (delayslot_size
) {
4433 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4436 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4441 int post_delay
= insn_bytes
+ delayslot_size
;
4442 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4444 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4448 if (insn_bytes
== 2)
4449 ctx
->hflags
|= MIPS_HFLAG_B16
;
4454 /* special3 bitfield operations */
4455 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4456 int rs
, int lsb
, int msb
)
4458 TCGv t0
= tcg_temp_new();
4459 TCGv t1
= tcg_temp_new();
4461 gen_load_gpr(t1
, rs
);
4464 if (lsb
+ msb
> 31) {
4467 tcg_gen_shri_tl(t0
, t1
, lsb
);
4469 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4471 tcg_gen_ext32s_tl(t0
, t0
);
4474 #if defined(TARGET_MIPS64)
4483 if (lsb
+ msb
> 63) {
4486 tcg_gen_shri_tl(t0
, t1
, lsb
);
4488 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4496 gen_load_gpr(t0
, rt
);
4497 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4498 tcg_gen_ext32s_tl(t0
, t0
);
4500 #if defined(TARGET_MIPS64)
4511 gen_load_gpr(t0
, rt
);
4512 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4517 MIPS_INVAL("bitops");
4518 generate_exception_end(ctx
, EXCP_RI
);
4523 gen_store_gpr(t0
, rt
);
4528 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4533 /* If no destination, treat it as a NOP. */
4537 t0
= tcg_temp_new();
4538 gen_load_gpr(t0
, rt
);
4542 TCGv t1
= tcg_temp_new();
4544 tcg_gen_shri_tl(t1
, t0
, 8);
4545 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4546 tcg_gen_shli_tl(t0
, t0
, 8);
4547 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4548 tcg_gen_or_tl(t0
, t0
, t1
);
4550 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4554 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4557 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4559 #if defined(TARGET_MIPS64)
4562 TCGv t1
= tcg_temp_new();
4564 tcg_gen_shri_tl(t1
, t0
, 8);
4565 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4566 tcg_gen_shli_tl(t0
, t0
, 8);
4567 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4568 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4574 TCGv t1
= tcg_temp_new();
4576 tcg_gen_shri_tl(t1
, t0
, 16);
4577 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4578 tcg_gen_shli_tl(t0
, t0
, 16);
4579 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4580 tcg_gen_or_tl(t0
, t0
, t1
);
4581 tcg_gen_shri_tl(t1
, t0
, 32);
4582 tcg_gen_shli_tl(t0
, t0
, 32);
4583 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4589 MIPS_INVAL("bsfhl");
4590 generate_exception_end(ctx
, EXCP_RI
);
4597 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4606 t0
= tcg_temp_new();
4607 t1
= tcg_temp_new();
4608 gen_load_gpr(t0
, rs
);
4609 gen_load_gpr(t1
, rt
);
4610 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4611 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4612 if (opc
== OPC_LSA
) {
4613 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4622 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4630 t0
= tcg_temp_new();
4631 gen_load_gpr(t0
, rt
);
4633 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4635 TCGv t1
= tcg_temp_new();
4636 gen_load_gpr(t1
, rs
);
4640 TCGv_i64 t2
= tcg_temp_new_i64();
4641 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4642 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4643 gen_move_low32(cpu_gpr
[rd
], t2
);
4644 tcg_temp_free_i64(t2
);
4647 #if defined(TARGET_MIPS64)
4649 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4650 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4651 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4661 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4668 t0
= tcg_temp_new();
4669 gen_load_gpr(t0
, rt
);
4672 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4674 #if defined(TARGET_MIPS64)
4676 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4683 #ifndef CONFIG_USER_ONLY
4684 /* CP0 (MMU and control) */
4685 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4687 TCGv_i64 t0
= tcg_temp_new_i64();
4688 TCGv_i64 t1
= tcg_temp_new_i64();
4690 tcg_gen_ext_tl_i64(t0
, arg
);
4691 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4692 #if defined(TARGET_MIPS64)
4693 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4695 tcg_gen_concat32_i64(t1
, t1
, t0
);
4697 tcg_gen_st_i64(t1
, cpu_env
, off
);
4698 tcg_temp_free_i64(t1
);
4699 tcg_temp_free_i64(t0
);
4702 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4704 TCGv_i64 t0
= tcg_temp_new_i64();
4705 TCGv_i64 t1
= tcg_temp_new_i64();
4707 tcg_gen_ext_tl_i64(t0
, arg
);
4708 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4709 tcg_gen_concat32_i64(t1
, t1
, t0
);
4710 tcg_gen_st_i64(t1
, cpu_env
, off
);
4711 tcg_temp_free_i64(t1
);
4712 tcg_temp_free_i64(t0
);
4715 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4717 TCGv_i64 t0
= tcg_temp_new_i64();
4719 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4720 #if defined(TARGET_MIPS64)
4721 tcg_gen_shri_i64(t0
, t0
, 30);
4723 tcg_gen_shri_i64(t0
, t0
, 32);
4725 gen_move_low32(arg
, t0
);
4726 tcg_temp_free_i64(t0
);
4729 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4731 TCGv_i64 t0
= tcg_temp_new_i64();
4733 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4734 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4735 gen_move_low32(arg
, t0
);
4736 tcg_temp_free_i64(t0
);
4739 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4741 TCGv_i32 t0
= tcg_temp_new_i32();
4743 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4744 tcg_gen_ext_i32_tl(arg
, t0
);
4745 tcg_temp_free_i32(t0
);
4748 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4750 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4751 tcg_gen_ext32s_tl(arg
, arg
);
4754 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4756 TCGv_i32 t0
= tcg_temp_new_i32();
4758 tcg_gen_trunc_tl_i32(t0
, arg
);
4759 tcg_gen_st_i32(t0
, cpu_env
, off
);
4760 tcg_temp_free_i32(t0
);
4763 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4765 const char *rn
= "invalid";
4767 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4768 goto mfhc0_read_zero
;
4775 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4779 goto mfhc0_read_zero
;
4785 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4789 goto mfhc0_read_zero
;
4795 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4796 ctx
->CP0_LLAddr_shift
);
4800 goto mfhc0_read_zero
;
4809 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4813 goto mfhc0_read_zero
;
4817 goto mfhc0_read_zero
;
4820 (void)rn
; /* avoid a compiler warning */
4821 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4825 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4826 tcg_gen_movi_tl(arg
, 0);
4829 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4831 const char *rn
= "invalid";
4832 uint64_t mask
= ctx
->PAMask
>> 36;
4834 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4842 tcg_gen_andi_tl(arg
, arg
, mask
);
4843 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4853 tcg_gen_andi_tl(arg
, arg
, mask
);
4854 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4864 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4865 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4866 relevant for modern MIPS cores supporting MTHC0, therefore
4867 treating MTHC0 to LLAddr as NOP. */
4880 tcg_gen_andi_tl(arg
, arg
, mask
);
4881 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4892 (void)rn
; /* avoid a compiler warning */
4894 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4897 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4899 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4900 tcg_gen_movi_tl(arg
, 0);
4902 tcg_gen_movi_tl(arg
, ~0);
4906 #define CP0_CHECK(c) \
4909 goto cp0_unimplemented; \
4913 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4915 const char *rn
= "invalid";
4918 check_insn(ctx
, ISA_MIPS32
);
4924 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4928 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4929 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4933 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4934 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4938 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4939 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4943 goto cp0_unimplemented
;
4949 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4950 gen_helper_mfc0_random(arg
, cpu_env
);
4954 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4955 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4959 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4960 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4964 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4969 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4970 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4975 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4981 rn
= "VPEScheFBack";
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4989 goto cp0_unimplemented
;
4996 TCGv_i64 tmp
= tcg_temp_new_i64();
4997 tcg_gen_ld_i64(tmp
, cpu_env
,
4998 offsetof(CPUMIPSState
, CP0_EntryLo0
));
4999 #if defined(TARGET_MIPS64)
5001 /* Move RI/XI fields to bits 31:30 */
5002 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5003 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5006 gen_move_low32(arg
, tmp
);
5007 tcg_temp_free_i64(tmp
);
5012 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5013 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5018 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5022 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5023 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5027 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5028 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5033 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5038 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5043 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5047 goto cp0_unimplemented
;
5054 TCGv_i64 tmp
= tcg_temp_new_i64();
5055 tcg_gen_ld_i64(tmp
, cpu_env
,
5056 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5057 #if defined(TARGET_MIPS64)
5059 /* Move RI/XI fields to bits 31:30 */
5060 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5061 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5064 gen_move_low32(arg
, tmp
);
5065 tcg_temp_free_i64(tmp
);
5070 goto cp0_unimplemented
;
5076 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5077 tcg_gen_ext32s_tl(arg
, arg
);
5081 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5082 rn
= "ContextConfig";
5083 goto cp0_unimplemented
;
5086 CP0_CHECK(ctx
->ulri
);
5087 tcg_gen_ld32s_tl(arg
, cpu_env
,
5088 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5092 goto cp0_unimplemented
;
5098 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5102 check_insn(ctx
, ISA_MIPS32R2
);
5103 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5107 goto cp0_unimplemented
;
5113 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5117 check_insn(ctx
, ISA_MIPS32R2
);
5118 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5122 check_insn(ctx
, ISA_MIPS32R2
);
5123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5127 check_insn(ctx
, ISA_MIPS32R2
);
5128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5132 check_insn(ctx
, ISA_MIPS32R2
);
5133 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5137 check_insn(ctx
, ISA_MIPS32R2
);
5138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5142 goto cp0_unimplemented
;
5148 check_insn(ctx
, ISA_MIPS32R2
);
5149 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5153 goto cp0_unimplemented
;
5159 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5160 tcg_gen_ext32s_tl(arg
, arg
);
5165 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5174 goto cp0_unimplemented
;
5180 /* Mark as an IO operation because we read the time. */
5181 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5184 gen_helper_mfc0_count(arg
, cpu_env
);
5185 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5188 /* Break the TB to be able to take timer interrupts immediately
5189 after reading count. */
5190 ctx
->bstate
= BS_STOP
;
5193 /* 6,7 are implementation dependent */
5195 goto cp0_unimplemented
;
5201 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5202 tcg_gen_ext32s_tl(arg
, arg
);
5206 goto cp0_unimplemented
;
5212 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5215 /* 6,7 are implementation dependent */
5217 goto cp0_unimplemented
;
5223 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5227 check_insn(ctx
, ISA_MIPS32R2
);
5228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5232 check_insn(ctx
, ISA_MIPS32R2
);
5233 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5237 check_insn(ctx
, ISA_MIPS32R2
);
5238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5242 goto cp0_unimplemented
;
5248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5252 goto cp0_unimplemented
;
5258 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5259 tcg_gen_ext32s_tl(arg
, arg
);
5263 goto cp0_unimplemented
;
5269 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5273 check_insn(ctx
, ISA_MIPS32R2
);
5274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5278 goto cp0_unimplemented
;
5284 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5288 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5292 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5296 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5300 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5304 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5307 /* 6,7 are implementation dependent */
5309 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5313 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5317 goto cp0_unimplemented
;
5323 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5327 goto cp0_unimplemented
;
5333 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5337 goto cp0_unimplemented
;
5343 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5347 goto cp0_unimplemented
;
5353 #if defined(TARGET_MIPS64)
5354 check_insn(ctx
, ISA_MIPS3
);
5355 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5356 tcg_gen_ext32s_tl(arg
, arg
);
5361 goto cp0_unimplemented
;
5365 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5366 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5373 goto cp0_unimplemented
;
5377 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5378 rn
= "'Diagnostic"; /* implementation dependent */
5383 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5387 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5388 rn
= "TraceControl";
5391 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5392 rn
= "TraceControl2";
5395 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5396 rn
= "UserTraceData";
5399 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5403 goto cp0_unimplemented
;
5410 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5411 tcg_gen_ext32s_tl(arg
, arg
);
5415 goto cp0_unimplemented
;
5421 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5422 rn
= "Performance0";
5425 // gen_helper_mfc0_performance1(arg);
5426 rn
= "Performance1";
5429 // gen_helper_mfc0_performance2(arg);
5430 rn
= "Performance2";
5433 // gen_helper_mfc0_performance3(arg);
5434 rn
= "Performance3";
5437 // gen_helper_mfc0_performance4(arg);
5438 rn
= "Performance4";
5441 // gen_helper_mfc0_performance5(arg);
5442 rn
= "Performance5";
5445 // gen_helper_mfc0_performance6(arg);
5446 rn
= "Performance6";
5449 // gen_helper_mfc0_performance7(arg);
5450 rn
= "Performance7";
5453 goto cp0_unimplemented
;
5457 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5463 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5467 goto cp0_unimplemented
;
5477 TCGv_i64 tmp
= tcg_temp_new_i64();
5478 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5479 gen_move_low32(arg
, tmp
);
5480 tcg_temp_free_i64(tmp
);
5488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5492 goto cp0_unimplemented
;
5501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5508 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5512 goto cp0_unimplemented
;
5518 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5519 tcg_gen_ext32s_tl(arg
, arg
);
5523 goto cp0_unimplemented
;
5530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5534 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5535 tcg_gen_ld_tl(arg
, cpu_env
,
5536 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5537 tcg_gen_ext32s_tl(arg
, arg
);
5541 goto cp0_unimplemented
;
5545 goto cp0_unimplemented
;
5547 (void)rn
; /* avoid a compiler warning */
5548 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5552 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5553 gen_mfc0_unimplemented(ctx
, arg
);
5556 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5558 const char *rn
= "invalid";
5561 check_insn(ctx
, ISA_MIPS32
);
5563 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5571 gen_helper_mtc0_index(cpu_env
, arg
);
5575 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5576 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5580 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5585 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5590 goto cp0_unimplemented
;
5600 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5601 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5605 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5606 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5610 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5611 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5615 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5616 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5620 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5621 tcg_gen_st_tl(arg
, cpu_env
,
5622 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5627 tcg_gen_st_tl(arg
, cpu_env
,
5628 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5629 rn
= "VPEScheFBack";
5632 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5633 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5637 goto cp0_unimplemented
;
5643 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5647 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5648 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5652 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5653 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5658 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5663 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5668 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5672 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5673 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5677 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5678 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5682 goto cp0_unimplemented
;
5688 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5692 goto cp0_unimplemented
;
5698 gen_helper_mtc0_context(cpu_env
, arg
);
5702 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5703 rn
= "ContextConfig";
5704 goto cp0_unimplemented
;
5707 CP0_CHECK(ctx
->ulri
);
5708 tcg_gen_st_tl(arg
, cpu_env
,
5709 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5713 goto cp0_unimplemented
;
5719 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5723 check_insn(ctx
, ISA_MIPS32R2
);
5724 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5726 ctx
->bstate
= BS_STOP
;
5729 goto cp0_unimplemented
;
5735 gen_helper_mtc0_wired(cpu_env
, arg
);
5739 check_insn(ctx
, ISA_MIPS32R2
);
5740 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5744 check_insn(ctx
, ISA_MIPS32R2
);
5745 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5749 check_insn(ctx
, ISA_MIPS32R2
);
5750 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5754 check_insn(ctx
, ISA_MIPS32R2
);
5755 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5759 check_insn(ctx
, ISA_MIPS32R2
);
5760 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5764 goto cp0_unimplemented
;
5770 check_insn(ctx
, ISA_MIPS32R2
);
5771 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5772 ctx
->bstate
= BS_STOP
;
5776 goto cp0_unimplemented
;
5794 goto cp0_unimplemented
;
5800 gen_helper_mtc0_count(cpu_env
, arg
);
5803 /* 6,7 are implementation dependent */
5805 goto cp0_unimplemented
;
5811 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5815 goto cp0_unimplemented
;
5821 gen_helper_mtc0_compare(cpu_env
, arg
);
5824 /* 6,7 are implementation dependent */
5826 goto cp0_unimplemented
;
5832 save_cpu_state(ctx
, 1);
5833 gen_helper_mtc0_status(cpu_env
, arg
);
5834 /* BS_STOP isn't good enough here, hflags may have changed. */
5835 gen_save_pc(ctx
->pc
+ 4);
5836 ctx
->bstate
= BS_EXCP
;
5840 check_insn(ctx
, ISA_MIPS32R2
);
5841 gen_helper_mtc0_intctl(cpu_env
, arg
);
5842 /* Stop translation as we may have switched the execution mode */
5843 ctx
->bstate
= BS_STOP
;
5847 check_insn(ctx
, ISA_MIPS32R2
);
5848 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5849 /* Stop translation as we may have switched the execution mode */
5850 ctx
->bstate
= BS_STOP
;
5854 check_insn(ctx
, ISA_MIPS32R2
);
5855 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5856 /* Stop translation as we may have switched the execution mode */
5857 ctx
->bstate
= BS_STOP
;
5861 goto cp0_unimplemented
;
5867 save_cpu_state(ctx
, 1);
5868 gen_helper_mtc0_cause(cpu_env
, arg
);
5872 goto cp0_unimplemented
;
5878 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5882 goto cp0_unimplemented
;
5892 check_insn(ctx
, ISA_MIPS32R2
);
5893 gen_helper_mtc0_ebase(cpu_env
, arg
);
5897 goto cp0_unimplemented
;
5903 gen_helper_mtc0_config0(cpu_env
, arg
);
5905 /* Stop translation as we may have switched the execution mode */
5906 ctx
->bstate
= BS_STOP
;
5909 /* ignored, read only */
5913 gen_helper_mtc0_config2(cpu_env
, arg
);
5915 /* Stop translation as we may have switched the execution mode */
5916 ctx
->bstate
= BS_STOP
;
5919 gen_helper_mtc0_config3(cpu_env
, arg
);
5921 /* Stop translation as we may have switched the execution mode */
5922 ctx
->bstate
= BS_STOP
;
5925 gen_helper_mtc0_config4(cpu_env
, arg
);
5927 ctx
->bstate
= BS_STOP
;
5930 gen_helper_mtc0_config5(cpu_env
, arg
);
5932 /* Stop translation as we may have switched the execution mode */
5933 ctx
->bstate
= BS_STOP
;
5935 /* 6,7 are implementation dependent */
5945 rn
= "Invalid config selector";
5946 goto cp0_unimplemented
;
5952 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5956 goto cp0_unimplemented
;
5962 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5966 goto cp0_unimplemented
;
5972 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5976 goto cp0_unimplemented
;
5982 #if defined(TARGET_MIPS64)
5983 check_insn(ctx
, ISA_MIPS3
);
5984 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5989 goto cp0_unimplemented
;
5993 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5994 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5997 gen_helper_mtc0_framemask(cpu_env
, arg
);
6001 goto cp0_unimplemented
;
6006 rn
= "Diagnostic"; /* implementation dependent */
6011 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6012 /* BS_STOP isn't good enough here, hflags may have changed. */
6013 gen_save_pc(ctx
->pc
+ 4);
6014 ctx
->bstate
= BS_EXCP
;
6018 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6019 rn
= "TraceControl";
6020 /* Stop translation as we may have switched the execution mode */
6021 ctx
->bstate
= BS_STOP
;
6024 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6025 rn
= "TraceControl2";
6026 /* Stop translation as we may have switched the execution mode */
6027 ctx
->bstate
= BS_STOP
;
6030 /* Stop translation as we may have switched the execution mode */
6031 ctx
->bstate
= BS_STOP
;
6032 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6033 rn
= "UserTraceData";
6034 /* Stop translation as we may have switched the execution mode */
6035 ctx
->bstate
= BS_STOP
;
6038 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6039 /* Stop translation as we may have switched the execution mode */
6040 ctx
->bstate
= BS_STOP
;
6044 goto cp0_unimplemented
;
6051 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6055 goto cp0_unimplemented
;
6061 gen_helper_mtc0_performance0(cpu_env
, arg
);
6062 rn
= "Performance0";
6065 // gen_helper_mtc0_performance1(arg);
6066 rn
= "Performance1";
6069 // gen_helper_mtc0_performance2(arg);
6070 rn
= "Performance2";
6073 // gen_helper_mtc0_performance3(arg);
6074 rn
= "Performance3";
6077 // gen_helper_mtc0_performance4(arg);
6078 rn
= "Performance4";
6081 // gen_helper_mtc0_performance5(arg);
6082 rn
= "Performance5";
6085 // gen_helper_mtc0_performance6(arg);
6086 rn
= "Performance6";
6089 // gen_helper_mtc0_performance7(arg);
6090 rn
= "Performance7";
6093 goto cp0_unimplemented
;
6107 goto cp0_unimplemented
;
6116 gen_helper_mtc0_taglo(cpu_env
, arg
);
6123 gen_helper_mtc0_datalo(cpu_env
, arg
);
6127 goto cp0_unimplemented
;
6136 gen_helper_mtc0_taghi(cpu_env
, arg
);
6143 gen_helper_mtc0_datahi(cpu_env
, arg
);
6148 goto cp0_unimplemented
;
6154 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6158 goto cp0_unimplemented
;
6165 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6169 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6170 tcg_gen_st_tl(arg
, cpu_env
,
6171 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6175 goto cp0_unimplemented
;
6177 /* Stop translation as we may have switched the execution mode */
6178 ctx
->bstate
= BS_STOP
;
6181 goto cp0_unimplemented
;
6183 (void)rn
; /* avoid a compiler warning */
6184 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6185 /* For simplicity assume that all writes can cause interrupts. */
6186 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6188 ctx
->bstate
= BS_STOP
;
6193 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6196 #if defined(TARGET_MIPS64)
6197 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6199 const char *rn
= "invalid";
6202 check_insn(ctx
, ISA_MIPS64
);
6208 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6212 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6213 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6217 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6218 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6223 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6227 goto cp0_unimplemented
;
6233 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6234 gen_helper_mfc0_random(arg
, cpu_env
);
6238 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6239 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6243 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6248 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6254 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6258 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6259 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6263 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6264 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6265 rn
= "VPEScheFBack";
6268 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6269 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6273 goto cp0_unimplemented
;
6279 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6283 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6284 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6288 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6289 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6293 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6294 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6298 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6299 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6303 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6304 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6308 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6309 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6313 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6314 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6318 goto cp0_unimplemented
;
6324 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6328 goto cp0_unimplemented
;
6334 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6338 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6339 rn
= "ContextConfig";
6340 goto cp0_unimplemented
;
6343 CP0_CHECK(ctx
->ulri
);
6344 tcg_gen_ld_tl(arg
, cpu_env
,
6345 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6349 goto cp0_unimplemented
;
6355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6359 check_insn(ctx
, ISA_MIPS32R2
);
6360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6364 goto cp0_unimplemented
;
6370 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6374 check_insn(ctx
, ISA_MIPS32R2
);
6375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6379 check_insn(ctx
, ISA_MIPS32R2
);
6380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6384 check_insn(ctx
, ISA_MIPS32R2
);
6385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6389 check_insn(ctx
, ISA_MIPS32R2
);
6390 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6394 check_insn(ctx
, ISA_MIPS32R2
);
6395 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6399 goto cp0_unimplemented
;
6405 check_insn(ctx
, ISA_MIPS32R2
);
6406 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6410 goto cp0_unimplemented
;
6416 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6421 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6426 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6430 goto cp0_unimplemented
;
6436 /* Mark as an IO operation because we read the time. */
6437 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6440 gen_helper_mfc0_count(arg
, cpu_env
);
6441 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6444 /* Break the TB to be able to take timer interrupts immediately
6445 after reading count. */
6446 ctx
->bstate
= BS_STOP
;
6449 /* 6,7 are implementation dependent */
6451 goto cp0_unimplemented
;
6457 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6461 goto cp0_unimplemented
;
6467 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6470 /* 6,7 are implementation dependent */
6472 goto cp0_unimplemented
;
6478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6482 check_insn(ctx
, ISA_MIPS32R2
);
6483 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6487 check_insn(ctx
, ISA_MIPS32R2
);
6488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6492 check_insn(ctx
, ISA_MIPS32R2
);
6493 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6497 goto cp0_unimplemented
;
6503 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6507 goto cp0_unimplemented
;
6513 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6517 goto cp0_unimplemented
;
6523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6527 check_insn(ctx
, ISA_MIPS32R2
);
6528 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6532 goto cp0_unimplemented
;
6538 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6542 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6546 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6550 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6558 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6561 /* 6,7 are implementation dependent */
6563 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6571 goto cp0_unimplemented
;
6577 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6581 goto cp0_unimplemented
;
6587 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6591 goto cp0_unimplemented
;
6597 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6601 goto cp0_unimplemented
;
6607 check_insn(ctx
, ISA_MIPS3
);
6608 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6612 goto cp0_unimplemented
;
6616 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6617 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6624 goto cp0_unimplemented
;
6628 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6629 rn
= "'Diagnostic"; /* implementation dependent */
6634 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6638 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6639 rn
= "TraceControl";
6642 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6643 rn
= "TraceControl2";
6646 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6647 rn
= "UserTraceData";
6650 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6654 goto cp0_unimplemented
;
6661 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6665 goto cp0_unimplemented
;
6671 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6672 rn
= "Performance0";
6675 // gen_helper_dmfc0_performance1(arg);
6676 rn
= "Performance1";
6679 // gen_helper_dmfc0_performance2(arg);
6680 rn
= "Performance2";
6683 // gen_helper_dmfc0_performance3(arg);
6684 rn
= "Performance3";
6687 // gen_helper_dmfc0_performance4(arg);
6688 rn
= "Performance4";
6691 // gen_helper_dmfc0_performance5(arg);
6692 rn
= "Performance5";
6695 // gen_helper_dmfc0_performance6(arg);
6696 rn
= "Performance6";
6699 // gen_helper_dmfc0_performance7(arg);
6700 rn
= "Performance7";
6703 goto cp0_unimplemented
;
6707 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6714 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6718 goto cp0_unimplemented
;
6727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6738 goto cp0_unimplemented
;
6747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6754 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6758 goto cp0_unimplemented
;
6764 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6768 goto cp0_unimplemented
;
6775 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6779 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6780 tcg_gen_ld_tl(arg
, cpu_env
,
6781 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6785 goto cp0_unimplemented
;
6789 goto cp0_unimplemented
;
6791 (void)rn
; /* avoid a compiler warning */
6792 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6796 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6797 gen_mfc0_unimplemented(ctx
, arg
);
6800 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6802 const char *rn
= "invalid";
6805 check_insn(ctx
, ISA_MIPS64
);
6807 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6815 gen_helper_mtc0_index(cpu_env
, arg
);
6819 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6820 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6824 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6829 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6834 goto cp0_unimplemented
;
6844 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6845 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6849 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6850 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6854 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6855 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6859 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6860 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6864 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6865 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6869 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6870 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6871 rn
= "VPEScheFBack";
6874 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6875 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6879 goto cp0_unimplemented
;
6885 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6889 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6890 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6894 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6895 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6899 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6900 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6904 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6905 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6909 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6910 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6914 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6915 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6919 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6920 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6924 goto cp0_unimplemented
;
6930 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6934 goto cp0_unimplemented
;
6940 gen_helper_mtc0_context(cpu_env
, arg
);
6944 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6945 rn
= "ContextConfig";
6946 goto cp0_unimplemented
;
6949 CP0_CHECK(ctx
->ulri
);
6950 tcg_gen_st_tl(arg
, cpu_env
,
6951 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6955 goto cp0_unimplemented
;
6961 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6965 check_insn(ctx
, ISA_MIPS32R2
);
6966 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6970 goto cp0_unimplemented
;
6976 gen_helper_mtc0_wired(cpu_env
, arg
);
6980 check_insn(ctx
, ISA_MIPS32R2
);
6981 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6985 check_insn(ctx
, ISA_MIPS32R2
);
6986 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6990 check_insn(ctx
, ISA_MIPS32R2
);
6991 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6995 check_insn(ctx
, ISA_MIPS32R2
);
6996 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7000 check_insn(ctx
, ISA_MIPS32R2
);
7001 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7005 goto cp0_unimplemented
;
7011 check_insn(ctx
, ISA_MIPS32R2
);
7012 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7013 ctx
->bstate
= BS_STOP
;
7017 goto cp0_unimplemented
;
7035 goto cp0_unimplemented
;
7041 gen_helper_mtc0_count(cpu_env
, arg
);
7044 /* 6,7 are implementation dependent */
7046 goto cp0_unimplemented
;
7048 /* Stop translation as we may have switched the execution mode */
7049 ctx
->bstate
= BS_STOP
;
7054 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7058 goto cp0_unimplemented
;
7064 gen_helper_mtc0_compare(cpu_env
, arg
);
7067 /* 6,7 are implementation dependent */
7069 goto cp0_unimplemented
;
7071 /* Stop translation as we may have switched the execution mode */
7072 ctx
->bstate
= BS_STOP
;
7077 save_cpu_state(ctx
, 1);
7078 gen_helper_mtc0_status(cpu_env
, arg
);
7079 /* BS_STOP isn't good enough here, hflags may have changed. */
7080 gen_save_pc(ctx
->pc
+ 4);
7081 ctx
->bstate
= BS_EXCP
;
7085 check_insn(ctx
, ISA_MIPS32R2
);
7086 gen_helper_mtc0_intctl(cpu_env
, arg
);
7087 /* Stop translation as we may have switched the execution mode */
7088 ctx
->bstate
= BS_STOP
;
7092 check_insn(ctx
, ISA_MIPS32R2
);
7093 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7094 /* Stop translation as we may have switched the execution mode */
7095 ctx
->bstate
= BS_STOP
;
7099 check_insn(ctx
, ISA_MIPS32R2
);
7100 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7101 /* Stop translation as we may have switched the execution mode */
7102 ctx
->bstate
= BS_STOP
;
7106 goto cp0_unimplemented
;
7112 save_cpu_state(ctx
, 1);
7113 /* Mark as an IO operation because we may trigger a software
7115 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7118 gen_helper_mtc0_cause(cpu_env
, arg
);
7119 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7122 /* Stop translation as we may have triggered an intetrupt */
7123 ctx
->bstate
= BS_STOP
;
7127 goto cp0_unimplemented
;
7133 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7137 goto cp0_unimplemented
;
7147 check_insn(ctx
, ISA_MIPS32R2
);
7148 gen_helper_mtc0_ebase(cpu_env
, arg
);
7152 goto cp0_unimplemented
;
7158 gen_helper_mtc0_config0(cpu_env
, arg
);
7160 /* Stop translation as we may have switched the execution mode */
7161 ctx
->bstate
= BS_STOP
;
7164 /* ignored, read only */
7168 gen_helper_mtc0_config2(cpu_env
, arg
);
7170 /* Stop translation as we may have switched the execution mode */
7171 ctx
->bstate
= BS_STOP
;
7174 gen_helper_mtc0_config3(cpu_env
, arg
);
7176 /* Stop translation as we may have switched the execution mode */
7177 ctx
->bstate
= BS_STOP
;
7180 /* currently ignored */
7184 gen_helper_mtc0_config5(cpu_env
, arg
);
7186 /* Stop translation as we may have switched the execution mode */
7187 ctx
->bstate
= BS_STOP
;
7189 /* 6,7 are implementation dependent */
7191 rn
= "Invalid config selector";
7192 goto cp0_unimplemented
;
7198 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7202 goto cp0_unimplemented
;
7208 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7212 goto cp0_unimplemented
;
7218 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7222 goto cp0_unimplemented
;
7228 check_insn(ctx
, ISA_MIPS3
);
7229 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7233 goto cp0_unimplemented
;
7237 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7238 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7241 gen_helper_mtc0_framemask(cpu_env
, arg
);
7245 goto cp0_unimplemented
;
7250 rn
= "Diagnostic"; /* implementation dependent */
7255 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7256 /* BS_STOP isn't good enough here, hflags may have changed. */
7257 gen_save_pc(ctx
->pc
+ 4);
7258 ctx
->bstate
= BS_EXCP
;
7262 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7263 /* Stop translation as we may have switched the execution mode */
7264 ctx
->bstate
= BS_STOP
;
7265 rn
= "TraceControl";
7268 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7269 /* Stop translation as we may have switched the execution mode */
7270 ctx
->bstate
= BS_STOP
;
7271 rn
= "TraceControl2";
7274 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7275 /* Stop translation as we may have switched the execution mode */
7276 ctx
->bstate
= BS_STOP
;
7277 rn
= "UserTraceData";
7280 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7281 /* Stop translation as we may have switched the execution mode */
7282 ctx
->bstate
= BS_STOP
;
7286 goto cp0_unimplemented
;
7293 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7297 goto cp0_unimplemented
;
7303 gen_helper_mtc0_performance0(cpu_env
, arg
);
7304 rn
= "Performance0";
7307 // gen_helper_mtc0_performance1(cpu_env, arg);
7308 rn
= "Performance1";
7311 // gen_helper_mtc0_performance2(cpu_env, arg);
7312 rn
= "Performance2";
7315 // gen_helper_mtc0_performance3(cpu_env, arg);
7316 rn
= "Performance3";
7319 // gen_helper_mtc0_performance4(cpu_env, arg);
7320 rn
= "Performance4";
7323 // gen_helper_mtc0_performance5(cpu_env, arg);
7324 rn
= "Performance5";
7327 // gen_helper_mtc0_performance6(cpu_env, arg);
7328 rn
= "Performance6";
7331 // gen_helper_mtc0_performance7(cpu_env, arg);
7332 rn
= "Performance7";
7335 goto cp0_unimplemented
;
7349 goto cp0_unimplemented
;
7358 gen_helper_mtc0_taglo(cpu_env
, arg
);
7365 gen_helper_mtc0_datalo(cpu_env
, arg
);
7369 goto cp0_unimplemented
;
7378 gen_helper_mtc0_taghi(cpu_env
, arg
);
7385 gen_helper_mtc0_datahi(cpu_env
, arg
);
7390 goto cp0_unimplemented
;
7396 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7400 goto cp0_unimplemented
;
7407 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7411 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7412 tcg_gen_st_tl(arg
, cpu_env
,
7413 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7417 goto cp0_unimplemented
;
7419 /* Stop translation as we may have switched the execution mode */
7420 ctx
->bstate
= BS_STOP
;
7423 goto cp0_unimplemented
;
7425 (void)rn
; /* avoid a compiler warning */
7426 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7427 /* For simplicity assume that all writes can cause interrupts. */
7428 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7430 ctx
->bstate
= BS_STOP
;
7435 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7437 #endif /* TARGET_MIPS64 */
7439 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7440 int u
, int sel
, int h
)
7442 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7443 TCGv t0
= tcg_temp_local_new();
7445 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7446 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7447 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7448 tcg_gen_movi_tl(t0
, -1);
7449 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7450 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7451 tcg_gen_movi_tl(t0
, -1);
7457 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7460 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7470 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7473 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7476 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7479 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7482 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7485 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7488 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7491 gen_mfc0(ctx
, t0
, rt
, sel
);
7498 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7501 gen_mfc0(ctx
, t0
, rt
, sel
);
7507 gen_helper_mftc0_status(t0
, cpu_env
);
7510 gen_mfc0(ctx
, t0
, rt
, sel
);
7516 gen_helper_mftc0_cause(t0
, cpu_env
);
7526 gen_helper_mftc0_epc(t0
, cpu_env
);
7536 gen_helper_mftc0_ebase(t0
, cpu_env
);
7546 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7556 gen_helper_mftc0_debug(t0
, cpu_env
);
7559 gen_mfc0(ctx
, t0
, rt
, sel
);
7564 gen_mfc0(ctx
, t0
, rt
, sel
);
7566 } else switch (sel
) {
7567 /* GPR registers. */
7569 gen_helper_1e0i(mftgpr
, t0
, rt
);
7571 /* Auxiliary CPU registers */
7575 gen_helper_1e0i(mftlo
, t0
, 0);
7578 gen_helper_1e0i(mfthi
, t0
, 0);
7581 gen_helper_1e0i(mftacx
, t0
, 0);
7584 gen_helper_1e0i(mftlo
, t0
, 1);
7587 gen_helper_1e0i(mfthi
, t0
, 1);
7590 gen_helper_1e0i(mftacx
, t0
, 1);
7593 gen_helper_1e0i(mftlo
, t0
, 2);
7596 gen_helper_1e0i(mfthi
, t0
, 2);
7599 gen_helper_1e0i(mftacx
, t0
, 2);
7602 gen_helper_1e0i(mftlo
, t0
, 3);
7605 gen_helper_1e0i(mfthi
, t0
, 3);
7608 gen_helper_1e0i(mftacx
, t0
, 3);
7611 gen_helper_mftdsp(t0
, cpu_env
);
7617 /* Floating point (COP1). */
7619 /* XXX: For now we support only a single FPU context. */
7621 TCGv_i32 fp0
= tcg_temp_new_i32();
7623 gen_load_fpr32(ctx
, fp0
, rt
);
7624 tcg_gen_ext_i32_tl(t0
, fp0
);
7625 tcg_temp_free_i32(fp0
);
7627 TCGv_i32 fp0
= tcg_temp_new_i32();
7629 gen_load_fpr32h(ctx
, fp0
, rt
);
7630 tcg_gen_ext_i32_tl(t0
, fp0
);
7631 tcg_temp_free_i32(fp0
);
7635 /* XXX: For now we support only a single FPU context. */
7636 gen_helper_1e0i(cfc1
, t0
, rt
);
7638 /* COP2: Not implemented. */
7645 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7646 gen_store_gpr(t0
, rd
);
7652 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7653 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();
    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
    gen_helper_mttc0_vpecontrol(cpu_env, t0);
    gen_helper_mttc0_vpeconf0(cpu_env, t0);
    gen_helper_mttc0_tcstatus(cpu_env, t0);
    gen_helper_mttc0_tcbind(cpu_env, t0);
    gen_helper_mttc0_tcrestart(cpu_env, t0);
    gen_helper_mttc0_tchalt(cpu_env, t0);
    gen_helper_mttc0_tccontext(cpu_env, t0);
    gen_helper_mttc0_tcschedule(cpu_env, t0);
    gen_helper_mttc0_tcschefback(cpu_env, t0);
    gen_mtc0(ctx, t0, rd, sel);
    gen_helper_mttc0_entryhi(cpu_env, t0);
    gen_mtc0(ctx, t0, rd, sel);
    gen_helper_mttc0_status(cpu_env, t0);
    gen_mtc0(ctx, t0, rd, sel);
    gen_helper_mttc0_cause(cpu_env, t0);
    gen_helper_mttc0_ebase(cpu_env, t0);
    gen_helper_mttc0_debug(cpu_env, t0);
    gen_mtc0(ctx, t0, rd, sel);
    gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
    gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
    gen_helper_0e1i(mttlo, t0, 0);
    gen_helper_0e1i(mtthi, t0, 0);
    gen_helper_0e1i(mttacx, t0, 0);
    gen_helper_0e1i(mttlo, t0, 1);
    gen_helper_0e1i(mtthi, t0, 1);
    gen_helper_0e1i(mttacx, t0, 1);
    gen_helper_0e1i(mttlo, t0, 2);
    gen_helper_0e1i(mtthi, t0, 2);
    gen_helper_0e1i(mttacx, t0, 2);
    gen_helper_0e1i(mttlo, t0, 3);
    gen_helper_0e1i(mtthi, t0, 3);
    gen_helper_0e1i(mttacx, t0, 3);
    gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
    /* XXX: For now we support only a single FPU context. */
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(ctx, fp0, rd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32h(ctx, fp0, rd);
    tcg_temp_free_i32(fp0);
    /* XXX: For now we support only a single FPU context. */
    TCGv_i32 fs_tmp = tcg_const_i32(rd);
    gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
    tcg_temp_free_i32(fs_tmp);
    /* Stop translation as we may have changed hflags */
    ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";
    check_cp0_enabled(ctx);
    gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
    TCGv t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
    check_insn(ctx, ISA_MIPS3);
    gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
    check_insn(ctx, ISA_MIPS3);
    TCGv t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
    gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
    TCGv t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
    check_insn(ctx, ASE_MT);
    gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
             ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
    check_insn(ctx, ASE_MT);
    gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
             ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
    if (!env->tlb->helper_tlbwi)
    gen_helper_tlbwi(cpu_env);
    if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
    } /* treat as nop if TLBINV not supported */
    if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
    } /* treat as nop if TLBINV not supported */
    if (!env->tlb->helper_tlbwr)
    gen_helper_tlbwr(cpu_env);
    if (!env->tlb->helper_tlbp)
    gen_helper_tlbp(cpu_env);
    if (!env->tlb->helper_tlbr)
    gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
    if ((ctx->insn_flags & ISA_MIPS32R6) &&
        (ctx->hflags & MIPS_HFLAG_BMASK)) {
    int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
    if (ctx->opcode & (1 << bit_shift)) {
        check_insn(ctx, ISA_MIPS32R5);
        gen_helper_eretnc(cpu_env);
        check_insn(ctx, ISA_MIPS2);
        gen_helper_eret(cpu_env);
    ctx->bstate = BS_EXCP;
    check_insn(ctx, ISA_MIPS32);
    if ((ctx->insn_flags & ISA_MIPS32R6) &&
        (ctx->hflags & MIPS_HFLAG_BMASK)) {
    if (!(ctx->hflags & MIPS_HFLAG_DM)) {
        generate_exception_end(ctx, EXCP_RI);
        gen_helper_deret(cpu_env);
        ctx->bstate = BS_EXCP;
    check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
    if ((ctx->insn_flags & ISA_MIPS32R6) &&
        (ctx->hflags & MIPS_HFLAG_BMASK)) {
    /* If we get an exception, we want to restart at next instruction */
    save_cpu_state(ctx, 1);
    gen_helper_wait(cpu_env);
    ctx->bstate = BS_EXCP;
    generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();
    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->pc + 4 + offset;
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    ctx->hflags |= MIPS_HFLAG_BL;
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_nand_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_and_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
    tcg_gen_and_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
    tcg_gen_nand_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    ctx->hflags |= MIPS_HFLAG_BC;
    MIPS_INVAL("cp1 cond branch");
    generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
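/*
 * Note added for reference (an assumption, not original commentary): the
 * coprocessor 1 condition codes tested above live in FCR31, with cc 0 at
 * bit 23 and cc 1..7 at bits 25..31 of the register, which is the mapping
 * get_fp_bit() is expected to provide.  A minimal sketch of that mapping:
 *
 *     static inline int example_fp_cc_bit(int cc)
 *     {
 *         return cc ? 24 + cc : 23;   // cc0 -> bit 23, ccN -> bit 24+N
 *     }
 */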
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->pc + 4, offset);
    tcg_gen_xori_i64(t0, t0, 1);
    ctx->hflags |= MIPS_HFLAG_BC;
    /* t0 already set */
    ctx->hflags |= MIPS_HFLAG_BC;
    MIPS_INVAL("cp1 cond branch");
    generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
/* Coprocessor 1 (FPU) */
#define FOP(func, fmt) (((fmt) << 21) | (func))
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),
    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),
    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),
    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),
    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),
    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),
    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
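/*
 * Illustrative sketch (not part of the original translator): gen_farith()
 * below dispatches on an fopcode value that combines the COP1 fmt field
 * (bits 25:21) with the function field (bits 5:0), exactly as the FOP()
 * macro above composes it.  The helper name is hypothetical.
 */
static inline uint32_t example_extract_fopcode(uint32_t insn)
{
    /* Keep only the fmt and func fields, matching FOP(func, fmt). */
    return insn & ((0x1f << 21) | 0x3f);
}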
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);
    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(ctx, fp0, fs);
    tcg_temp_free_i32(fp0);
    gen_helper_1e0i(cfc1, t0, fs);
    gen_store_gpr(t0, rt);
    gen_load_gpr(t0, rt);
    save_cpu_state(ctx, 0);
    TCGv_i32 fs_tmp = tcg_const_i32(fs);
    gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
    tcg_temp_free_i32(fs_tmp);
    /* Stop translation as we may have changed hflags */
    ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
    gen_load_fpr64(ctx, t0, fs);
    gen_store_gpr(t0, rt);
    gen_load_gpr(t0, rt);
    gen_store_fpr64(ctx, t0, fs);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);
    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32h(ctx, fp0, fs);
    tcg_temp_free_i32(fp0);
    MIPS_INVAL("cp1 move");
    generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
    tcg_gen_andi_i32(fp0, fp0, 1);
    tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
    tcg_gen_andi_i32(fp1, fp1, 1);
    tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
    tcg_gen_andi_i32(fp1, fp1, 1);
    tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
    MIPS_INVAL("gen_sel_s");
    generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
    tcg_gen_andi_i64(fp0, fp0, 1);
    tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
    tcg_gen_andi_i64(fp1, fp1, 1);
    tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
    tcg_gen_andi_i64(fp1, fp1, 1);
    tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
    MIPS_INVAL("gen_sel_d");
    generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_helper_float_sub_s(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_helper_float_mul_s(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_helper_float_div_s(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_sqrt_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_abs_s(fp0, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_chs_s(fp0, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_roundl_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_truncl_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_ceill_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_floorl_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_roundw_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_truncw_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_ceilw_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_floorw_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_s(ctx, op1, fd, ft, fs);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_s(ctx, op1, fd, ft, fs);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_s(ctx, op1, fd, ft, fs);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    gen_movcf_s(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_recip_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_rsqrt_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fd);
    gen_helper_float_maddf_s(fp2, cpu_env, fp0, fp1, fp2);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fd);
    gen_helper_float_msubf_s(fp2, cpu_env, fp0, fp1, fp2);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_rint_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_class_s(fp0, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    case OPC_MIN_S: /* OPC_RECIP2_S */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_min_s(fp2, cpu_env, fp0, fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
        tcg_temp_free_i32(fp1);
        tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_recip2_s(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
    case OPC_MINA_S: /* OPC_RECIP1_S */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_mina_s(fp2, cpu_env, fp0, fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);
        tcg_temp_free_i32(fp1);
        tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_helper_float_recip1_s(fp0, cpu_env, fp0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
    case OPC_MAX_S: /* OPC_RSQRT1_S */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_max_s(fp1, cpu_env, fp0, fp1);
        gen_store_fpr32(ctx, fp1, fd);
        tcg_temp_free_i32(fp1);
        tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_helper_float_rsqrt1_s(fp0, cpu_env, fp0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
    case OPC_MAXA_S: /* OPC_RSQRT2_S */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_maxa_s(fp1, cpu_env, fp0, fp1);
        gen_store_fpr32(ctx, fp1, fd);
        tcg_temp_free_i32(fp1);
        tcg_temp_free_i32(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_helper_float_rsqrt2_s(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fd);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_cvtd_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_cvtw_s(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_cvtl_s(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    TCGv_i64 fp64 = tcg_temp_new_i64();
    TCGv_i32 fp32_0 = tcg_temp_new_i32();
    TCGv_i32 fp32_1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp32_0, fs);
    gen_load_fpr32(ctx, fp32_1, ft);
    tcg_gen_concat_i32_i64(fp64, fp32_1, fp32_0);
    tcg_temp_free_i32(fp32_1);
    tcg_temp_free_i32(fp32_0);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    case OPC_CMP_NGLE_S:
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_s(ctx, func-48, ft, fs, cc);
        gen_cmp_s(ctx, func-48, ft, fs, cc);
    check_cp1_registers(ctx, fs | ft | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_add_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | ft | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_sub_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | ft | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_mul_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | ft | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_div_d(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_sqrt_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_abs_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_chs_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_roundl_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_truncl_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_ceill_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_floorl_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_roundw_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_truncw_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_ceilw_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_floorw_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_d(ctx, op1, fd, ft, fs);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_d(ctx, op1, fd, ft, fs);
    check_insn(ctx, ISA_MIPS32R6);
    gen_sel_d(ctx, op1, fd, ft, fs);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_registers(ctx, fs | fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fd);
    gen_helper_float_maddf_d(fp2, cpu_env, fp0, fp1, fp2);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fd);
    gen_helper_float_msubf_d(fp2, cpu_env, fp0, fp1, fp2);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rint_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_insn(ctx, ISA_MIPS32R6);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_class_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    case OPC_MIN_D: /* OPC_RECIP2_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_min_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_recip2_d(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
    case OPC_MINA_D: /* OPC_RECIP1_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_mina_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_recip1_d(fp0, cpu_env, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
    case OPC_MAX_D: /* OPC_RSQRT1_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_max_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_helper_float_rsqrt1_d(fp0, cpu_env, fp0);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
    case OPC_MAXA_D: /* OPC_RSQRT2_D */
    if (ctx->insn_flags & ISA_MIPS32R6) {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_maxa_d(fp1, cpu_env, fp0, fp1);
        gen_store_fpr64(ctx, fp1, fd);
        tcg_temp_free_i64(fp1);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_helper_float_rsqrt2_d(fp0, cpu_env, fp0, fp1);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
    case OPC_CMP_NGLE_D:
    check_insn_opc_removed(ctx, ISA_MIPS32R6);
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_d(ctx, func-48, ft, fs, cc);
        gen_cmp_d(ctx, func-48, ft, fs, cc);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvtw_d(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtl_d(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_cvts_w(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fd);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr32(ctx, fp32, fs);
    gen_helper_float_cvtd_w(fp64, cpu_env, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_l(fp32, cpu_env, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(ctx, fp32, fd);
    tcg_temp_free_i32(fp32);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtd_l(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtps_pw(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_add_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_sub_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_mul_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_abs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_chs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    gen_movcf_ps(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGLabel *l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_addr_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_mulr_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_recip2_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip1_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt1_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_rsqrt2_ps(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_helper_float_cvts_pu(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtpw_ps(fp0, cpu_env, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_helper_float_cvts_pl(fp0, cpu_env, fp0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_store_fpr32h(ctx, fp0, fd);
    gen_store_fpr32(ctx, fp1, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32h(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    gen_load_fpr32h(ctx, fp0, fs);
    gen_load_fpr32h(ctx, fp1, ft);
    gen_store_fpr32(ctx, fp1, fd);
    gen_store_fpr32h(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    case OPC_CMP_UEQ_PS:
    case OPC_CMP_OLT_PS:
    case OPC_CMP_ULT_PS:
    case OPC_CMP_OLE_PS:
    case OPC_CMP_ULE_PS:
    case OPC_CMP_NGLE_PS:
    case OPC_CMP_SEQ_PS:
    case OPC_CMP_NGL_PS:
    case OPC_CMP_NGE_PS:
    case OPC_CMP_NGT_PS:
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_ps(ctx, func-48, ft, fs, cc);
        gen_cmp_ps(ctx, func-48, ft, fs, cc);
    MIPS_INVAL("farith");
    generate_exception_end(ctx, EXCP_RI);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();
    gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
    TCGv_i32 fp0 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fs);
    tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
    tcg_temp_free_i32(fp0);
    check_cp1_registers(ctx, fs);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
    tcg_temp_free_i64(fp0);
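/*
 * Illustrative sketch (an assumption, not original code): how the register
 * fields of an indexed FP load/store such as LWXC1/LDXC1 would be pulled out
 * of the instruction word before calling gen_flt3_ldst().  Field positions
 * follow the standard COP1X layout; the helper name is hypothetical.
 */
static inline void example_decode_cop1x_ldst(uint32_t insn,
                                             int *base, int *index, int *fd)
{
    *base  = (insn >> 21) & 0x1f;   /* GPR holding the base address */
    *index = (insn >> 16) & 0x1f;   /* GPR holding the index */
    *fd    = (insn >> 6)  & 0x1f;   /* FPR source/destination */
}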
10081 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10082 int fd
, int fr
, int fs
, int ft
)
10088 TCGv t0
= tcg_temp_local_new();
10089 TCGv_i32 fp
= tcg_temp_new_i32();
10090 TCGv_i32 fph
= tcg_temp_new_i32();
10091 TCGLabel
*l1
= gen_new_label();
10092 TCGLabel
*l2
= gen_new_label();
10094 gen_load_gpr(t0
, fr
);
10095 tcg_gen_andi_tl(t0
, t0
, 0x7);
10097 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10098 gen_load_fpr32(ctx
, fp
, fs
);
10099 gen_load_fpr32h(ctx
, fph
, fs
);
10100 gen_store_fpr32(ctx
, fp
, fd
);
10101 gen_store_fpr32h(ctx
, fph
, fd
);
10104 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10106 #ifdef TARGET_WORDS_BIGENDIAN
10107 gen_load_fpr32(ctx
, fp
, fs
);
10108 gen_load_fpr32h(ctx
, fph
, ft
);
10109 gen_store_fpr32h(ctx
, fp
, fd
);
10110 gen_store_fpr32(ctx
, fph
, fd
);
10112 gen_load_fpr32h(ctx
, fph
, fs
);
10113 gen_load_fpr32(ctx
, fp
, ft
);
10114 gen_store_fpr32(ctx
, fph
, fd
);
10115 gen_store_fpr32h(ctx
, fp
, fd
);
        tcg_temp_free_i32(fp);
        tcg_temp_free_i32(fph);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
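/*
 * RDHWR gives user code read access to a small set of hardware registers:
 * CPU number, SYNCI step size, the cycle counter and its resolution, and
 * the UserLocal (ULR) register.  In user mode ULR is always readable; in
 * system mode it is only readable when CP0 access or HWREna.ULR permits,
 * otherwise a Reserved Instruction exception is raised.
 */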
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();

        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);

        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);

        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);

        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
            /* Performance counter registers are not implemented other than
             * control register 0. */
            generate_exception(ctx, EXCP_RI);
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);

        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);

#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
            generate_exception_end(ctx, EXCP_RI);

    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
static inline void clear_branch_hflags(DisasContext *ctx)
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
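/*
 * Emit the actual control transfer for a branch whose delay (or forbidden)
 * slot has just been translated: fall through for a forbidden slot,
 * unconditional jump or branch-likely to btarget, a conditional branch
 * that tests bcond, or an indirect branch through btarget (switching
 * between standard and MIPS16/microMIPS mode when those ASEs are present).
 */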
static void gen_branch(DisasContext *ctx, int insn_bytes)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);

            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BC:
            /* Conditional branch */
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_goto_tb(ctx, 0, ctx->btarget);

        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
                tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            tcg_gen_exit_tb(0);

            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
/* Compact Branches */
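/*
 * R6 compact branches have no delay slot: the link register for the
 * "and link" forms is written here, btarget is computed from PC + 4, and
 * conditional forms branch around the taken path using the inverted
 * condition.  The instruction following a not-taken compact branch sits
 * in a forbidden slot (MIPS_HFLAG_FBNSLOT).
 */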
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands and calculate btarget */
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        /* OPC_BEQZC, OPC_BNEZC */
        gen_load_gpr(t0, rs);
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        /* OPC_JIC, OPC_JIALC */
        TCGv tbase = tcg_temp_new();
        TCGv toffset = tcg_temp_new();

        gen_load_gpr(tbase, rt);
        tcg_gen_movi_tl(toffset, offset);
        gen_op_addr_add(ctx, btarget, tbase, toffset);
        tcg_temp_free(tbase);
        tcg_temp_free(toffset);
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            ctx->hflags |= MIPS_HFLAG_BR;
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                tcg_temp_free(input_overflow);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,

static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
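/*
 * MIPS16e SAVE: spill the requested argument registers to their home
 * slots above the stack pointer, push ra/s0/s1 and any extended static
 * registers with DECR_AND_STORE, then drop SP by the (already scaled)
 * frame size.  RESTORE below undoes the same layout.
 */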
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

        generate_exception_end(ctx, EXCP_RI);

        gen_base_offset_addr(ctx, t0, 29, 12);
        gen_load_gpr(t1, 7);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 8);
        gen_load_gpr(t1, 6);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 4);
        gen_load_gpr(t1, 5);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

        gen_base_offset_addr(ctx, t0, 29, 0);
        gen_load_gpr(t1, 4);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)

        DECR_AND_STORE(31);
        DECR_AND_STORE(30);
        DECR_AND_STORE(23);
        DECR_AND_STORE(22);
        DECR_AND_STORE(21);
        DECR_AND_STORE(20);
        DECR_AND_STORE(19);
        DECR_AND_STORE(18);
        DECR_AND_STORE(17);
        DECR_AND_STORE(16);

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);

static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                            \
        tcg_gen_movi_tl(t2, -4);                           \
        gen_op_addr_add(ctx, t0, t0, t2);                  \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
        gen_store_gpr(t1, reg);                            \
    } while (0)

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);

static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
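/*
 * MIPS16 I64-format instructions (LD/SD relative to SP or PC, the DADDIU
 * forms) exist only on 64-bit targets.  Their small 16-bit immediates are
 * scaled here (usually << 3 for doubleword accesses) unless the EXTEND
 * prefix already supplied a full offset.
 */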
#if defined(TARGET_MIPS64)
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_ld(ctx, OPC_LD, ry, 29, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_st(ctx, OPC_SD, ry, 29, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : (ctx->opcode & 0xff) << 3;
        gen_st(ctx, OPC_SD, 31, 29, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
        gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
            generate_exception_end(ctx, EXCP_RI);
            offset = extended ? offset : offset << 3;
            gen_ld(ctx, OPC_LDPC, ry, 0, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
        gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_addiupc(ctx, ry, offset, 1, extended);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
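/*
 * EXTENDed MIPS16 instructions: the EXTEND prefix halfword is fused with
 * the following halfword into ctx->opcode, and the full 16-bit immediate
 * is reassembled from the three fields it was split across before the
 * instruction is decoded with the same major opcodes as the short forms.
 */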
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int extend = cpu_lduw_code(env, ctx->pc + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));

    /* The extended opcodes cleverly reuse the opcodes from their 16-bit ... */
    case M16_OPC_ADDIUSP:
        gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, imm, 0, 1);

        gen_compute_branch(ctx, OPC_BEQ, 4, 0, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */

        gen_compute_branch(ctx, OPC_BEQ, 4, rx, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_BNEQZ:
        gen_compute_branch(ctx, OPC_BNE, 4, rx, 0, offset << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_SHIFT:
        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception_end(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LD, ry, rx, offset);

        imm = ctx->opcode & 0xf;
        imm = imm | ((ctx->opcode >> 20) & 0x7f) << 4;
        imm = imm | ((ctx->opcode >> 16) & 0xf) << 11;
        imm = (int16_t) (imm << 1) >> 1;
        if ((ctx->opcode >> 4) & 0x1) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
            gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
            generate_exception_end(ctx, EXCP_RI);
            gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);
    case M16_OPC_ADDIU8:
        gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);
        gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
    case M16_OPC_SLTIU:
        gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);

            gen_compute_branch(ctx, OPC_BEQ, 4, 24, 0, offset << 1, 0);
            gen_compute_branch(ctx, OPC_BNE, 4, 24, 0, offset << 1, 0);
            gen_st(ctx, OPC_SW, 31, 29, imm);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm);
            check_insn(ctx, ISA_MIPS32);
                int xsregs = (ctx->opcode >> 24) & 0x7;
                int aregs = (ctx->opcode >> 16) & 0xf;
                int do_ra = (ctx->opcode >> 6) & 0x1;
                int do_s0 = (ctx->opcode >> 5) & 0x1;
                int do_s1 = (ctx->opcode >> 4) & 0x1;
                int framesize = (((ctx->opcode >> 20) & 0xf) << 4
                                 | (ctx->opcode & 0xf)) << 3;

                if (ctx->opcode & (1 << 7)) {
                    gen_mips16_save(ctx, xsregs, aregs,
                                    do_ra, do_s0, do_s1,
                    gen_mips16_restore(ctx, xsregs, aregs,
                                       do_ra, do_s0, do_s1,
            generate_exception_end(ctx, EXCP_RI);

        tcg_gen_movi_tl(cpu_gpr[rx], (uint16_t) imm);
        tcg_gen_xori_tl(cpu_gpr[24], cpu_gpr[rx], (uint16_t) imm);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, OPC_SD, ry, rx, offset);

        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset);
        gen_ld(ctx, OPC_LW, rx, 29, offset);
        gen_ld(ctx, OPC_LW, ry, rx, offset);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset);
        gen_ld(ctx, OPC_LWPC, rx, 0, offset);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LWU, ry, rx, offset);

        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset);
        gen_st(ctx, OPC_SW, rx, 29, offset);
        gen_st(ctx, OPC_SW, ry, rx, offset);
#if defined(TARGET_MIPS64)
        decode_i64_mips16(ctx, ry, funct, offset, 1);

        generate_exception_end(ctx, EXCP_RI);
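/*
 * UHI semihosting: when semihosting is enabled, SDBBP with code 1 is
 * treated as a UHI service request and handled by the semihosting helper
 * instead of raising a debug exception.
 */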
static inline bool is_uhi(int sdbbp_code)
#ifdef CONFIG_USER_ONLY
    return semihosting_enabled() && sdbbp_code == 1;

static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
    int op, cnvt_op, op1, offset;

    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;

    case M16_OPC_ADDIUSP:
            int16_t imm = ((uint8_t) ctx->opcode) << 2;

            gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
    case M16_OPC_ADDIUPC:
        gen_addiupc(ctx, rx, ((uint8_t) ctx->opcode) << 2, 0, 0);

        offset = (ctx->opcode & 0x7ff) << 1;
        offset = (int16_t)(offset << 4) >> 4;
        gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0, offset, 0);
        /* No delay slot, so just process as a normal instruction */

        offset = cpu_lduw_code(env, ctx->pc + 2);
        offset = (((ctx->opcode & 0x1f) << 21)
                  | ((ctx->opcode >> 5) & 0x1f) << 16
        op = ((ctx->opcode >> 10) & 0x1) ? OPC_JALX : OPC_JAL;
        gen_compute_branch(ctx, op, 4, rx, ry, offset, 2);

        gen_compute_branch(ctx, OPC_BEQ, 2, rx, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_BNEQZ:
        gen_compute_branch(ctx, OPC_BNE, 2, rx, 0,
                           ((int8_t)ctx->opcode) << 1, 0);
        /* No delay slot, so just process as a normal instruction */
    case M16_OPC_SHIFT:
        switch (ctx->opcode & 0x3) {
            gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
            generate_exception_end(ctx, EXCP_RI);
            gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
            gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LD, ry, rx, offset << 3);

            int16_t imm = (int8_t)((ctx->opcode & 0xf) << 4) >> 4;

            if ((ctx->opcode >> 4) & 1) {
#if defined(TARGET_MIPS64)
                check_insn(ctx, ISA_MIPS3);
                check_mips_64(ctx);
                gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
                generate_exception_end(ctx, EXCP_RI);
                gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);

    case M16_OPC_ADDIU8:
            int16_t imm = (int8_t) ctx->opcode;

            gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);

            int16_t imm = (uint8_t) ctx->opcode;
            gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
    case M16_OPC_SLTIU:
            int16_t imm = (uint8_t) ctx->opcode;
            gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);

        funct = (ctx->opcode >> 8) & 0x7;
            gen_compute_branch(ctx, OPC_BEQ, 2, 24, 0,
                               ((int8_t)ctx->opcode) << 1, 0);
            gen_compute_branch(ctx, OPC_BNE, 2, 24, 0,
                               ((int8_t)ctx->opcode) << 1, 0);
            gen_st(ctx, OPC_SW, 31, 29, (ctx->opcode & 0xff) << 2);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29,
                          ((int8_t)ctx->opcode) << 3);
            check_insn(ctx, ISA_MIPS32);
                int do_ra = ctx->opcode & (1 << 6);
                int do_s0 = ctx->opcode & (1 << 5);
                int do_s1 = ctx->opcode & (1 << 4);
                int framesize = ctx->opcode & 0xf;

                if (framesize == 0) {
                    framesize = framesize << 3;

                if (ctx->opcode & (1 << 7)) {
                    gen_mips16_save(ctx, 0, 0,
                                    do_ra, do_s0, do_s1, framesize);
                    gen_mips16_restore(ctx, 0, 0,
                                       do_ra, do_s0, do_s1, framesize);

                int rz = xlat(ctx->opcode & 0x7);

                reg32 = (((ctx->opcode >> 3) & 0x3) << 3) |
                        ((ctx->opcode >> 5) & 0x7);
                gen_arith(ctx, OPC_ADDU, reg32, rz, 0);

            reg32 = ctx->opcode & 0x1f;
            gen_arith(ctx, OPC_ADDU, ry, reg32, 0);
            generate_exception_end(ctx, EXCP_RI);

            int16_t imm = (uint8_t) ctx->opcode;

            gen_arith_imm(ctx, OPC_ADDIU, rx, 0, imm);

            int16_t imm = (uint8_t) ctx->opcode;
            gen_logic_imm(ctx, OPC_XORI, 24, rx, imm);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, OPC_SD, ry, rx, offset << 3);

        gen_ld(ctx, OPC_LB, ry, rx, offset);
        gen_ld(ctx, OPC_LH, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_ld(ctx, OPC_LW, ry, rx, offset << 2);
        gen_ld(ctx, OPC_LBU, ry, rx, offset);
        gen_ld(ctx, OPC_LHU, ry, rx, offset << 1);
        gen_ld(ctx, OPC_LWPC, rx, 0, ((uint8_t)ctx->opcode) << 2);
#if defined (TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, OPC_LWU, ry, rx, offset << 2);

        gen_st(ctx, OPC_SB, ry, rx, offset);
        gen_st(ctx, OPC_SH, ry, rx, offset << 1);
        gen_st(ctx, OPC_SW, rx, 29, ((uint8_t)ctx->opcode) << 2);
        gen_st(ctx, OPC_SW, ry, rx, offset << 2);

            int rz = xlat((ctx->opcode >> 2) & 0x7);

            switch (ctx->opcode & 0x3) {
                mips32_op = OPC_ADDU;
                mips32_op = OPC_SUBU;
#if defined(TARGET_MIPS64)
                mips32_op = OPC_DADDU;
                check_insn(ctx, ISA_MIPS3);
                check_mips_64(ctx);
                mips32_op = OPC_DSUBU;
                check_insn(ctx, ISA_MIPS3);
                check_mips_64(ctx);
                generate_exception_end(ctx, EXCP_RI);
            gen_arith(ctx, mips32_op, rz, rx, ry);

            int nd = (ctx->opcode >> 7) & 0x1;
            int link = (ctx->opcode >> 6) & 0x1;
            int ra = (ctx->opcode >> 5) & 0x1;

                check_insn(ctx, ISA_MIPS32);
            gen_compute_branch(ctx, op, 2, ra ? 31 : rx, 31, 0,

            if (is_uhi(extract32(ctx->opcode, 5, 6))) {
                gen_helper_do_semihosting(cpu_env);
                /* XXX: not clear which exception should be raised
                 *      when in debug mode... */
                check_insn(ctx, ISA_MIPS32);
                generate_exception_end(ctx, EXCP_DBp);

            gen_slt(ctx, OPC_SLT, 24, rx, ry);
            gen_slt(ctx, OPC_SLTU, 24, rx, ry);
            generate_exception_end(ctx, EXCP_BREAK);
            gen_shift(ctx, OPC_SLLV, ry, rx, ry);
            gen_shift(ctx, OPC_SRLV, ry, rx, ry);
            gen_shift(ctx, OPC_SRAV, ry, rx, ry);
#if defined (TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSRL, ry, ry, sa);

            gen_logic(ctx, OPC_XOR, 24, rx, ry);
            gen_arith(ctx, OPC_SUBU, rx, 0, ry);
            gen_logic(ctx, OPC_AND, rx, rx, ry);
            gen_logic(ctx, OPC_OR, rx, rx, ry);
            gen_logic(ctx, OPC_XOR, rx, rx, ry);
            gen_logic(ctx, OPC_NOR, rx, ry, 0);
            gen_HILO(ctx, OPC_MFHI, 0, rx);
            check_insn(ctx, ISA_MIPS32);
            case RR_RY_CNVT_ZEB:
                tcg_gen_ext8u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            case RR_RY_CNVT_ZEH:
                tcg_gen_ext16u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            case RR_RY_CNVT_SEB:
                tcg_gen_ext8s_tl(cpu_gpr[rx], cpu_gpr[rx]);
            case RR_RY_CNVT_SEH:
                tcg_gen_ext16s_tl(cpu_gpr[rx], cpu_gpr[rx]);
#if defined (TARGET_MIPS64)
            case RR_RY_CNVT_ZEW:
                check_insn(ctx, ISA_MIPS64);
                check_mips_64(ctx);
                tcg_gen_ext32u_tl(cpu_gpr[rx], cpu_gpr[rx]);
            case RR_RY_CNVT_SEW:
                check_insn(ctx, ISA_MIPS64);
                check_mips_64(ctx);
                tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
                generate_exception_end(ctx, EXCP_RI);

            gen_HILO(ctx, OPC_MFLO, 0, rx);
#if defined (TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, OPC_DSRA, ry, ry, sa);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, OPC_DSLLV, ry, rx, ry);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, OPC_DSRLV, ry, rx, ry);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, OPC_DSRAV, ry, rx, ry);

            gen_muldiv(ctx, OPC_MULT, 0, rx, ry);
            gen_muldiv(ctx, OPC_MULTU, 0, rx, ry);
            gen_muldiv(ctx, OPC_DIV, 0, rx, ry);
            gen_muldiv(ctx, OPC_DIVU, 0, rx, ry);
#if defined (TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_muldiv(ctx, OPC_DMULT, 0, rx, ry);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_muldiv(ctx, OPC_DMULTU, 0, rx, ry);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_muldiv(ctx, OPC_DDIV, 0, rx, ry);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_muldiv(ctx, OPC_DDIVU, 0, rx, ry);

            generate_exception_end(ctx, EXCP_RI);

    case M16_OPC_EXTEND:
        decode_extended_mips16_opc(env, ctx);
#if defined(TARGET_MIPS64)
        funct = (ctx->opcode >> 8) & 0x7;
        decode_i64_mips16(ctx, ry, funct, offset, 0);

        generate_exception_end(ctx, EXCP_RI);
/* microMIPS extension to MIPS32/MIPS64 */

 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */

    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36,     /* MIPS64 */
    LD32 = 0x37,     /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */
    /* These opcodes are distinguished only by bits 9..6; those bits are
     * what are recorded below. */
    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */

 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers VolumeIV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *    Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field

    /* begin of microMIPS32 DSP */
    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */
    /* end of microMIPS32 DSP */

    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0,   /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */

/* POOL32C encoding of minor opcode field (bits 15..12) */
    /* 0xa is reserved */
    /* 0x6 is reserved */

/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c,    /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */

/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */

/* POOL16A encoding of minor opcode field */

/* POOL16B encoding of minor opcode field */

/* POOL16C encoding of minor opcode field */

/* R6 POOL16C encoding of minor opcode field (bits 0..5) */

/* POOL16D encoding of minor opcode field */

/* POOL16E encoding of minor opcode field */

static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
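/*
 * Helpers for the 16-bit microMIPS arithmetic/logic formats.  Their tiny
 * immediate fields are either scaled (ADDIUR1SP, ADDIUSP, ADDIUS5) or
 * looked up in small decode tables (ADDIUR2, ANDI16) before being handed
 * to the normal ADDIU/ANDI generators.
 */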
static void gen_addiur1sp(DisasContext *ctx)
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);

static void gen_addiur2(DisasContext *ctx)
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);

static void gen_addiusp(DisasContext *ctx)
    int encoded = ZIMM(ctx->opcode, 1, 9);

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
    } else if (encoded <= 509) {
        decoded = encoded - 512;
        decoded = encoded - 768;

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);

static void gen_addius5(DisasContext *ctx)
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);

static void gen_andi16(DisasContext *ctx)
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
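/*
 * LWM/SWM (and LDM/SDM on 64-bit targets) transfer a whole register list
 * in one instruction; they are implemented with helpers and are not legal
 * in a branch delay slot.
 */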
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
        gen_helper_lwm(cpu_env, t0, t1, t2);
        gen_helper_swm(cpu_env, t0, t1, t2);
#ifdef TARGET_MIPS64
        gen_helper_ldm(cpu_env, t0, t1, t2);
        gen_helper_sdm(cpu_env, t0, t1, t2);

    tcg_temp_free_i32(t2);

static void gen_pool16c_insn(DisasContext *ctx)
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
        gen_logic(ctx, OPC_NOR, rd, rs, 0);
        gen_logic(ctx, OPC_XOR, rd, rd, rs);
        gen_logic(ctx, OPC_AND, rd, rd, rs);
        gen_logic(ctx, OPC_OR, rd, rd, rs);

            static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
            int offset = ZIMM(ctx->opcode, 0, 4);

            gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],

            static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
            int offset = ZIMM(ctx->opcode, 0, 4);

            gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],

            int reg = ctx->opcode & 0x1f;

            gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 4);

            int reg = ctx->opcode & 0x1f;
            gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 0);
            /* Let normal delay slot handling in our caller take us
               to the branch target. */

        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 4);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 2);
        ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
        gen_HILO(ctx, OPC_MFHI, 0, uMIPS_RS5(ctx->opcode));
        gen_HILO(ctx, OPC_MFLO, 0, uMIPS_RS5(ctx->opcode));
        generate_exception_end(ctx, EXCP_BREAK);
        if (is_uhi(extract32(ctx->opcode, 0, 4))) {
            gen_helper_do_semihosting(cpu_env);
            /* XXX: not clear which exception should be raised
             *      when in debug mode... */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
    case JRADDIUSP + 0:
    case JRADDIUSP + 1:
            int imm = ZIMM(ctx->opcode, 0, 5);
            gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
            /* Let normal delay slot handling in our caller take us
               to the branch target. */

        generate_exception_end(ctx, EXCP_RI);

static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
        tcg_gen_movi_tl(cpu_gpr[re], 0);

static void gen_pool16c_r6_insn(DisasContext *ctx)
    int rt = mmreg((ctx->opcode >> 7) & 0x7);
    int rs = mmreg((ctx->opcode >> 4) & 0x7);

    switch (ctx->opcode & 0xf) {
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        gen_logic(ctx, OPC_AND, rt, rt, rs);

            int lwm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
            int offset = extract32(ctx->opcode, 4, 4);
            gen_ldst_multiple(ctx, LWM32, lwm_converted, 29, offset << 2);

    case R6_JRC16: /* JRCADDIUSP */
        if ((ctx->opcode >> 4) & 1) {
            int imm = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
            gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
            int rs = extract32(ctx->opcode, 5, 5);
            gen_compute_branch(ctx, OPC_JR, 2, rs, 0, 0, 0);
    case MOVEP ... MOVEP_07:
    case MOVEP_0C ... MOVEP_0F:
            int enc_dest = uMIPS_RD(ctx->opcode);
            int enc_rt = uMIPS_RS2(ctx->opcode);
            int enc_rs = (ctx->opcode & 3) | ((ctx->opcode >> 1) & 4);
            gen_movep(ctx, enc_dest, enc_rt, enc_rs);
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        gen_logic(ctx, OPC_OR, rt, rt, rs);

            int swm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
            int offset = extract32(ctx->opcode, 4, 4);
            gen_ldst_multiple(ctx, SWM32, swm_converted, 29, offset << 2);

    case JALRC16: /* BREAK16, SDBBP16 */
        switch (ctx->opcode & 0x3f) {
        case JALRC16 + 0x20:
            gen_compute_branch(ctx, OPC_JALR, 2, (ctx->opcode >> 5) & 0x1f,
            generate_exception(ctx, EXCP_BREAK);
            if (is_uhi(extract32(ctx->opcode, 6, 4))) {
                gen_helper_do_semihosting(cpu_env);
                if (ctx->hflags & MIPS_HFLAG_SBRI) {
                    generate_exception(ctx, EXCP_RI);
                    generate_exception(ctx, EXCP_DBp);

        generate_exception(ctx, EXCP_RI);

static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);
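/*
 * LWP/SWP and the 64-bit LDP/SDP forms move the register pair rd, rd+1
 * to/from two consecutive words (or doublewords) at base+offset; rd == 31
 * and placement in a delay slot are reserved and raise RI.
 */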
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

            generate_exception_end(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd + 1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
#ifdef TARGET_MIPS64
            generate_exception_end(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd + 1);

        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
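/*
 * POOL32Axf: register-to-register and privileged operations that share
 * the POOL32A major opcode.  They are dispatched on the extension field
 * in bits 11..6 and, for some groups, the minor field in bits 15..12.
 */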
static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
            mips32_op = OPC_TEQ;
            mips32_op = OPC_TGE;
            mips32_op = OPC_TGEU;
            mips32_op = OPC_TLT;
            mips32_op = OPC_TLTU;
            mips32_op = OPC_TNE;
            gen_trap(ctx, mips32_op, rs, rt, -1);
#ifndef CONFIG_USER_ONLY
            check_cp0_enabled(ctx);
                /* Treat as NOP. */
            gen_mfc0(ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);

            check_cp0_enabled(ctx);
                TCGv t0 = tcg_temp_new();

                gen_load_gpr(t0, rt);
                gen_mtc0(ctx, t0, rs, (ctx->opcode >> 11) & 0x7);

        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MADD, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MADDU, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUB, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MSUBU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;

        switch (minor & 3) {
            gen_muldiv(ctx, OPC_MULT, (ctx->opcode >> 14) & 3, rs, rt);
            gen_muldiv(ctx, OPC_MULTU, (ctx->opcode >> 14) & 3, rs, rt);
            goto pool32axf_invalid;

            check_insn(ctx, ISA_MIPS32R6);
            gen_bitswap(ctx, OPC_BITSWAP, rs, rt);
            gen_bshfl(ctx, OPC_SEB, rs, rt);
            gen_bshfl(ctx, OPC_SEH, rs, rt);
            mips32_op = OPC_CLO;
            mips32_op = OPC_CLZ;
            check_insn(ctx, ISA_MIPS32);
            gen_cl(ctx, mips32_op, rt, rs);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_rdhwr(ctx, rt, rs, 0);
            gen_bshfl(ctx, OPC_WSBH, rs, rt);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MULT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MULTU;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_DIV;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_DIVU;
            check_insn(ctx, ISA_MIPS32);
            gen_muldiv(ctx, mips32_op, 0, rs, rt);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADDU;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUBU;
            check_insn(ctx, ISA_MIPS32);
            gen_muldiv(ctx, mips32_op, 0, rs, rt);
            goto pool32axf_invalid;

            generate_exception_err(ctx, EXCP_CpU, 2);
            goto pool32axf_invalid;

        case JALR: /* JALRC */
        case JALR_HB: /* JALRC_HB */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* JALRC, JALRC_HB */
                gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 0);
                /* JALR, JALR_HB */
                gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 4);
                ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 2);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            goto pool32axf_invalid;

            check_cp0_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rs, rt);
            check_cp0_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rs, rt);
            goto pool32axf_invalid;
#ifndef CONFIG_USER_ONLY
            mips32_op = OPC_TLBP;
            mips32_op = OPC_TLBR;
            mips32_op = OPC_TLBWI;
            mips32_op = OPC_TLBWR;
            mips32_op = OPC_TLBINV;
            mips32_op = OPC_TLBINVF;
            mips32_op = OPC_WAIT;
            mips32_op = OPC_DERET;
            mips32_op = OPC_ERET;
            gen_cp0(env, ctx, mips32_op, rt, rs);
            goto pool32axf_invalid;

            check_cp0_enabled(ctx);
                TCGv t0 = tcg_temp_new();

                save_cpu_state(ctx, 1);
                gen_helper_di(t0, cpu_env);
                gen_store_gpr(t0, rs);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;

            check_cp0_enabled(ctx);
                TCGv t0 = tcg_temp_new();

                save_cpu_state(ctx, 1);
                gen_helper_ei(t0, cpu_env);
                gen_store_gpr(t0, rs);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;

            goto pool32axf_invalid;

            generate_exception_end(ctx, EXCP_SYSCALL);
            if (is_uhi(extract32(ctx->opcode, 16, 10))) {
                gen_helper_do_semihosting(cpu_env);
                check_insn(ctx, ISA_MIPS32);
                if (ctx->hflags & MIPS_HFLAG_SBRI) {
                    generate_exception_end(ctx, EXCP_RI);
                    generate_exception_end(ctx, EXCP_DBp);
            goto pool32axf_invalid;

        switch (minor & 3) {
            gen_HILO(ctx, OPC_MFHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MFLO, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTHI, minor >> 2, rs);
            gen_HILO(ctx, OPC_MTLO, minor >> 2, rs);
            goto pool32axf_invalid;

        check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_HILO(ctx, OPC_MFHI, 0, rs);
            gen_HILO(ctx, OPC_MFLO, 0, rs);
            gen_HILO(ctx, OPC_MTHI, 0, rs);
            gen_HILO(ctx, OPC_MTLO, 0, rs);
            goto pool32axf_invalid;

        MIPS_INVAL("pool32axf");
        generate_exception_end(ctx, EXCP_RI);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports.  */
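/*
 * POOL32Fxf: single-operand FPU operations.  The FLOAT_xBIT_FMT and
 * COND_FLOAT_MOV macros fold the fmt/condition bits into the minor opcode
 * so one switch can match every opcode/format combination and map it to
 * the corresponding MIPS32 FP opcode for gen_farith()/gen_cp1().
 */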
static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc

    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;

    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;

    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;

    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;

    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;

    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;

    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;

    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;

        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
13404 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13405 mips32_op
= OPC_RECIP1_PS
;
13408 /* Conversions from double */
13409 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13410 mips32_op
= OPC_CVT_D_S
;
13412 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13413 mips32_op
= OPC_CVT_D_W
;
13415 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13416 mips32_op
= OPC_CVT_D_L
;
13419 /* Conversions from single */
13420 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13421 mips32_op
= OPC_CVT_S_D
;
13423 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13424 mips32_op
= OPC_CVT_S_W
;
13426 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13427 mips32_op
= OPC_CVT_S_L
;
13429 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13432 /* Conditional moves on floating-point codes */
13433 case COND_FLOAT_MOV(MOVT
, 0):
13434 case COND_FLOAT_MOV(MOVT
, 1):
13435 case COND_FLOAT_MOV(MOVT
, 2):
13436 case COND_FLOAT_MOV(MOVT
, 3):
13437 case COND_FLOAT_MOV(MOVT
, 4):
13438 case COND_FLOAT_MOV(MOVT
, 5):
13439 case COND_FLOAT_MOV(MOVT
, 6):
13440 case COND_FLOAT_MOV(MOVT
, 7):
13441 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13442 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13444 case COND_FLOAT_MOV(MOVF
, 0):
13445 case COND_FLOAT_MOV(MOVF
, 1):
13446 case COND_FLOAT_MOV(MOVF
, 2):
13447 case COND_FLOAT_MOV(MOVF
, 3):
13448 case COND_FLOAT_MOV(MOVF
, 4):
13449 case COND_FLOAT_MOV(MOVF
, 5):
13450 case COND_FLOAT_MOV(MOVF
, 6):
13451 case COND_FLOAT_MOV(MOVF
, 7):
13452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13453 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13456 MIPS_INVAL("pool32fxf");
13457 generate_exception_end(ctx
, EXCP_RI
);
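/*
 * 32-bit microMIPS instructions: fetch the second halfword, merge it into
 * ctx->opcode, then dispatch on the major opcode in bits 31..26; the
 * POOL32A/B/F pools are further decoded from their minor opcode fields.
 */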
13462 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13466 int rt
, rs
, rd
, rr
;
13468 uint32_t op
, minor
, mips32_op
;
13469 uint32_t cond
, fmt
, cc
;
13471 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13472 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13474 rt
= (ctx
->opcode
>> 21) & 0x1f;
13475 rs
= (ctx
->opcode
>> 16) & 0x1f;
13476 rd
= (ctx
->opcode
>> 11) & 0x1f;
13477 rr
= (ctx
->opcode
>> 6) & 0x1f;
13478 imm
= (int16_t) ctx
->opcode
;
13480 op
= (ctx
->opcode
>> 26) & 0x3f;
13483 minor
= ctx
->opcode
& 0x3f;
13486 minor
= (ctx
->opcode
>> 6) & 0xf;
13489 mips32_op
= OPC_SLL
;
13492 mips32_op
= OPC_SRA
;
13495 mips32_op
= OPC_SRL
;
13498 mips32_op
= OPC_ROTR
;
13500 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13503 check_insn(ctx
, ISA_MIPS32R6
);
13504 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13507 check_insn(ctx
, ISA_MIPS32R6
);
13508 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13511 check_insn(ctx
, ISA_MIPS32R6
);
13512 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13515 goto pool32a_invalid
;
13519 minor
= (ctx
->opcode
>> 6) & 0xf;
13523 mips32_op
= OPC_ADD
;
13526 mips32_op
= OPC_ADDU
;
13529 mips32_op
= OPC_SUB
;
13532 mips32_op
= OPC_SUBU
;
13535 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13536 mips32_op
= OPC_MUL
;
13538 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13542 mips32_op
= OPC_SLLV
;
13545 mips32_op
= OPC_SRLV
;
13548 mips32_op
= OPC_SRAV
;
13551 mips32_op
= OPC_ROTRV
;
13553 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13555 /* Logical operations */
13557 mips32_op
= OPC_AND
;
13560 mips32_op
= OPC_OR
;
13563 mips32_op
= OPC_NOR
;
13566 mips32_op
= OPC_XOR
;
13568 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13570 /* Set less than */
13572 mips32_op
= OPC_SLT
;
13575 mips32_op
= OPC_SLTU
;
13577 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13580 goto pool32a_invalid
;
        minor = (ctx->opcode >> 6) & 0xf;
        /* Conditional moves */
        case MOVN: /* MUL */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
                gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
        case MOVZ: /* MUH */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
                gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
        case LWXS: /* DIV */
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
                gen_ldxs(ctx, rs, rt, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
            goto pool32a_invalid;
            gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_lsa(ctx, OPC_LSA, rd, rs, rt,
                    extract32(ctx->opcode, 9, 2));
            check_insn(ctx, ISA_MIPS32R6);
            gen_align(ctx, OPC_ALIGN, rd, rs, rt,
                      extract32(ctx->opcode, 9, 2));
            gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
            gen_pool32axf(env, ctx, rt, rs);
            generate_exception_end(ctx, EXCP_BREAK);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("pool32a");
            generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
            check_cp0_enabled(ctx);
            /* Treat as no-op. */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
            MIPS_INVAL("pool32b");
            generate_exception_end(ctx, EXCP_RI);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_ALNV_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_PS;
                gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
13770 case CABS_COND_FMT
:
13771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13772 cond
= (ctx
->opcode
>> 6) & 0xf;
13773 cc
= (ctx
->opcode
>> 13) & 0x7;
13774 fmt
= (ctx
->opcode
>> 10) & 0x3;
13777 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13780 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13783 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13786 goto pool32f_invalid
;
13790 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13791 cond
= (ctx
->opcode
>> 6) & 0xf;
13792 cc
= (ctx
->opcode
>> 13) & 0x7;
13793 fmt
= (ctx
->opcode
>> 10) & 0x3;
13796 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13799 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13802 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13805 goto pool32f_invalid
;
13809 check_insn(ctx
, ISA_MIPS32R6
);
13810 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13813 check_insn(ctx
, ISA_MIPS32R6
);
13814 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13817 gen_pool32fxf(ctx
, rt
, rs
);
13821 switch ((ctx
->opcode
>> 6) & 0x7) {
13823 mips32_op
= OPC_PLL_PS
;
13826 mips32_op
= OPC_PLU_PS
;
13829 mips32_op
= OPC_PUL_PS
;
13832 mips32_op
= OPC_PUU_PS
;
13835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13836 mips32_op
= OPC_CVT_PS_S
;
13838 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13841 goto pool32f_invalid
;
13845 check_insn(ctx
, ISA_MIPS32R6
);
13846 switch ((ctx
->opcode
>> 9) & 0x3) {
13848 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13851 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13854 goto pool32f_invalid
;
13859 switch ((ctx
->opcode
>> 6) & 0x7) {
13861 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13862 mips32_op
= OPC_LWXC1
;
13865 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13866 mips32_op
= OPC_SWXC1
;
13869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13870 mips32_op
= OPC_LDXC1
;
13873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13874 mips32_op
= OPC_SDXC1
;
13877 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13878 mips32_op
= OPC_LUXC1
;
13881 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13882 mips32_op
= OPC_SUXC1
;
13884 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13887 goto pool32f_invalid
;
13891 check_insn(ctx
, ISA_MIPS32R6
);
13892 switch ((ctx
->opcode
>> 9) & 0x3) {
13894 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13897 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13900 goto pool32f_invalid
;
13905 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13906 fmt
= (ctx
->opcode
>> 9) & 0x3;
13907 switch ((ctx
->opcode
>> 6) & 0x7) {
13911 mips32_op
= OPC_RSQRT2_S
;
13914 mips32_op
= OPC_RSQRT2_D
;
13917 mips32_op
= OPC_RSQRT2_PS
;
13920 goto pool32f_invalid
;
13926 mips32_op
= OPC_RECIP2_S
;
13929 mips32_op
= OPC_RECIP2_D
;
13932 mips32_op
= OPC_RECIP2_PS
;
13935 goto pool32f_invalid
;
13939 mips32_op
= OPC_ADDR_PS
;
13942 mips32_op
= OPC_MULR_PS
;
13944 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13947 goto pool32f_invalid
;
13951 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13952 cc
= (ctx
->opcode
>> 13) & 0x7;
13953 fmt
= (ctx
->opcode
>> 9) & 0x3;
13954 switch ((ctx
->opcode
>> 6) & 0x7) {
13955 case MOVF_FMT
: /* RINT_FMT */
13956 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13960 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13963 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13966 goto pool32f_invalid
;
13972 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13975 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13979 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13982 goto pool32f_invalid
;
13986 case MOVT_FMT
: /* CLASS_FMT */
13987 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13991 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
13994 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
13997 goto pool32f_invalid
;
14003 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14006 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14010 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14013 goto pool32f_invalid
;
14018 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14021 goto pool32f_invalid
;
14024 #define FINSN_3ARG_SDPS(prfx) \
14025 switch ((ctx->opcode >> 8) & 0x3) { \
14027 mips32_op = OPC_##prfx##_S; \
14030 mips32_op = OPC_##prfx##_D; \
14032 case FMT_SDPS_PS: \
14034 mips32_op = OPC_##prfx##_PS; \
14037 goto pool32f_invalid; \
14040 check_insn(ctx
, ISA_MIPS32R6
);
14041 switch ((ctx
->opcode
>> 9) & 0x3) {
14043 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14046 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14049 goto pool32f_invalid
;
14053 check_insn(ctx
, ISA_MIPS32R6
);
14054 switch ((ctx
->opcode
>> 9) & 0x3) {
14056 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14059 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14062 goto pool32f_invalid
;
14066 /* regular FP ops */
14067 switch ((ctx
->opcode
>> 6) & 0x3) {
14069 FINSN_3ARG_SDPS(ADD
);
14072 FINSN_3ARG_SDPS(SUB
);
14075 FINSN_3ARG_SDPS(MUL
);
14078 fmt
= (ctx
->opcode
>> 8) & 0x3;
14080 mips32_op
= OPC_DIV_D
;
14081 } else if (fmt
== 0) {
14082 mips32_op
= OPC_DIV_S
;
14084 goto pool32f_invalid
;
14088 goto pool32f_invalid
;
14093 switch ((ctx
->opcode
>> 6) & 0x7) {
14094 case MOVN_FMT
: /* SELNEZ_FMT */
14095 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14097 switch ((ctx
->opcode
>> 9) & 0x3) {
14099 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14102 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14105 goto pool32f_invalid
;
14109 FINSN_3ARG_SDPS(MOVN
);
14113 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14114 FINSN_3ARG_SDPS(MOVN
);
14116 case MOVZ_FMT
: /* SELEQZ_FMT */
14117 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14119 switch ((ctx
->opcode
>> 9) & 0x3) {
14121 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14124 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14127 goto pool32f_invalid
;
14131 FINSN_3ARG_SDPS(MOVZ
);
14135 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14136 FINSN_3ARG_SDPS(MOVZ
);
14139 check_insn(ctx
, ISA_MIPS32R6
);
14140 switch ((ctx
->opcode
>> 9) & 0x3) {
14142 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14145 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14148 goto pool32f_invalid
;
14152 check_insn(ctx
, ISA_MIPS32R6
);
14153 switch ((ctx
->opcode
>> 9) & 0x3) {
14155 mips32_op
= OPC_MADDF_S
;
14158 mips32_op
= OPC_MADDF_D
;
14161 goto pool32f_invalid
;
14165 check_insn(ctx
, ISA_MIPS32R6
);
14166 switch ((ctx
->opcode
>> 9) & 0x3) {
14168 mips32_op
= OPC_MSUBF_S
;
14171 mips32_op
= OPC_MSUBF_D
;
14174 goto pool32f_invalid
;
14178 goto pool32f_invalid
;
14182 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14186 MIPS_INVAL("pool32f");
14187 generate_exception_end(ctx
, EXCP_RI
);
14191 generate_exception_err(ctx
, EXCP_CpU
, 1);
14195 minor
= (ctx
->opcode
>> 21) & 0x1f;
14198 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14199 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14202 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14203 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14204 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14208 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14209 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14213 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14216 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14217 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14218 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14221 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14222 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14223 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14226 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14227 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14230 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14231 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14235 case TLTI
: /* BC1EQZC */
14236 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14238 check_cp1_enabled(ctx
);
14239 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14242 mips32_op
= OPC_TLTI
;
14246 case TGEI
: /* BC1NEZC */
14247 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14249 check_cp1_enabled(ctx
);
14250 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14253 mips32_op
= OPC_TGEI
;
14258 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14259 mips32_op
= OPC_TLTIU
;
14262 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14263 mips32_op
= OPC_TGEIU
;
14265 case TNEI
: /* SYNCI */
14266 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14268 /* Break the TB to be able to sync copied instructions
14270 ctx
->bstate
= BS_STOP
;
14273 mips32_op
= OPC_TNEI
;
14278 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14279 mips32_op
= OPC_TEQI
;
14281 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14286 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14287 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14288 4, rs
, 0, imm
<< 1, 0);
14289 /* Compact branches don't have a delay slot, so just let
14290 the normal delay slot handling take us to the branch
14294 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14295 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14298 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14299 /* Break the TB to be able to sync copied instructions
14301 ctx
->bstate
= BS_STOP
;
14305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14306 /* COP2: Not implemented. */
14307 generate_exception_err(ctx
, EXCP_CpU
, 2);
14310 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14311 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14314 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14315 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14318 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14319 mips32_op
= OPC_BC1FANY4
;
14322 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14323 mips32_op
= OPC_BC1TANY4
;
14326 check_insn(ctx
, ASE_MIPS3D
);
14329 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14330 check_cp1_enabled(ctx
);
14331 gen_compute_branch1(ctx
, mips32_op
,
14332 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14334 generate_exception_err(ctx
, EXCP_CpU
, 1);
14339 /* MIPS DSP: not implemented */
14342 MIPS_INVAL("pool32i");
14343 generate_exception_end(ctx
, EXCP_RI
);
14348 minor
= (ctx
->opcode
>> 12) & 0xf;
14349 offset
= sextract32(ctx
->opcode
, 0,
14350 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14354 mips32_op
= OPC_LWL
;
14357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14358 mips32_op
= OPC_SWL
;
14361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14362 mips32_op
= OPC_LWR
;
14365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14366 mips32_op
= OPC_SWR
;
14368 #if defined(TARGET_MIPS64)
14370 check_insn(ctx
, ISA_MIPS3
);
14371 check_mips_64(ctx
);
14372 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14373 mips32_op
= OPC_LDL
;
14376 check_insn(ctx
, ISA_MIPS3
);
14377 check_mips_64(ctx
);
14378 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14379 mips32_op
= OPC_SDL
;
14382 check_insn(ctx
, ISA_MIPS3
);
14383 check_mips_64(ctx
);
14384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14385 mips32_op
= OPC_LDR
;
14388 check_insn(ctx
, ISA_MIPS3
);
14389 check_mips_64(ctx
);
14390 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14391 mips32_op
= OPC_SDR
;
14394 check_insn(ctx
, ISA_MIPS3
);
14395 check_mips_64(ctx
);
14396 mips32_op
= OPC_LWU
;
14399 check_insn(ctx
, ISA_MIPS3
);
14400 check_mips_64(ctx
);
14401 mips32_op
= OPC_LLD
;
14405 mips32_op
= OPC_LL
;
14408 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14411 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14414 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14416 #if defined(TARGET_MIPS64)
14418 check_insn(ctx
, ISA_MIPS3
);
14419 check_mips_64(ctx
);
14420 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14424 /* Treat as no-op */
14425 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14426 /* hint codes 24-31 are reserved and signal RI */
14427 generate_exception(ctx
, EXCP_RI
);
14431 MIPS_INVAL("pool32c");
14432 generate_exception_end(ctx
, EXCP_RI
);
14436 case ADDI32
: /* AUI, LUI */
14437 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14439 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14442 mips32_op
= OPC_ADDI
;
14447 mips32_op
= OPC_ADDIU
;
14449 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14452 /* Logical operations */
14454 mips32_op
= OPC_ORI
;
14457 mips32_op
= OPC_XORI
;
14460 mips32_op
= OPC_ANDI
;
14462 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14465 /* Set less than immediate */
14467 mips32_op
= OPC_SLTI
;
14470 mips32_op
= OPC_SLTIU
;
14472 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14475 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14476 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14477 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14478 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14480 case JALS32
: /* BOVC, BEQC, BEQZALC */
14481 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14484 mips32_op
= OPC_BOVC
;
14485 } else if (rs
< rt
&& rs
== 0) {
14487 mips32_op
= OPC_BEQZALC
;
14490 mips32_op
= OPC_BEQC
;
14492 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14495 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14496 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14497 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14500 case BEQ32
: /* BC */
14501 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14503 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14504 sextract32(ctx
->opcode
<< 1, 0, 27));
14507 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14510 case BNE32
: /* BALC */
14511 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14513 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14514 sextract32(ctx
->opcode
<< 1, 0, 27));
14517 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14520 case J32
: /* BGTZC, BLTZC, BLTC */
14521 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14522 if (rs
== 0 && rt
!= 0) {
14524 mips32_op
= OPC_BGTZC
;
14525 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14527 mips32_op
= OPC_BLTZC
;
14530 mips32_op
= OPC_BLTC
;
14532 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14535 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14536 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14539 case JAL32
: /* BLEZC, BGEZC, BGEC */
14540 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14541 if (rs
== 0 && rt
!= 0) {
14543 mips32_op
= OPC_BLEZC
;
14544 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14546 mips32_op
= OPC_BGEZC
;
14549 mips32_op
= OPC_BGEC
;
14551 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14554 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14555 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14556 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14559 /* Floating point (COP1) */
14561 mips32_op
= OPC_LWC1
;
14564 mips32_op
= OPC_LDC1
;
14567 mips32_op
= OPC_SWC1
;
14570 mips32_op
= OPC_SDC1
;
14572 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14574 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14575 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14576 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14577 switch ((ctx
->opcode
>> 16) & 0x1f) {
14578 case ADDIUPC_00
... ADDIUPC_07
:
14579 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14582 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14585 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14587 case LWPC_08
... LWPC_0F
:
14588 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14591 generate_exception(ctx
, EXCP_RI
);
14596 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14597 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14599 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14602 case BNVC
: /* BNEC, BNEZALC */
14603 check_insn(ctx
, ISA_MIPS32R6
);
14606 mips32_op
= OPC_BNVC
;
14607 } else if (rs
< rt
&& rs
== 0) {
14609 mips32_op
= OPC_BNEZALC
;
14612 mips32_op
= OPC_BNEC
;
14614 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14616 case R6_BNEZC
: /* JIALC */
14617 check_insn(ctx
, ISA_MIPS32R6
);
14620 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14621 sextract32(ctx
->opcode
<< 1, 0, 22));
14624 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14627 case R6_BEQZC
: /* JIC */
14628 check_insn(ctx
, ISA_MIPS32R6
);
14631 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14632 sextract32(ctx
->opcode
<< 1, 0, 22));
14635 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14638 case BLEZALC
: /* BGEZALC, BGEUC */
14639 check_insn(ctx
, ISA_MIPS32R6
);
14640 if (rs
== 0 && rt
!= 0) {
14642 mips32_op
= OPC_BLEZALC
;
14643 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14645 mips32_op
= OPC_BGEZALC
;
14648 mips32_op
= OPC_BGEUC
;
14650 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14652 case BGTZALC
: /* BLTZALC, BLTUC */
14653 check_insn(ctx
, ISA_MIPS32R6
);
14654 if (rs
== 0 && rt
!= 0) {
14656 mips32_op
= OPC_BGTZALC
;
14657 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14659 mips32_op
= OPC_BLTZALC
;
14662 mips32_op
= OPC_BLTUC
;
14664 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14666 /* Loads and stores */
14668 mips32_op
= OPC_LB
;
14671 mips32_op
= OPC_LBU
;
14674 mips32_op
= OPC_LH
;
14677 mips32_op
= OPC_LHU
;
14680 mips32_op
= OPC_LW
;
14682 #ifdef TARGET_MIPS64
14684 check_insn(ctx
, ISA_MIPS3
);
14685 check_mips_64(ctx
);
14686 mips32_op
= OPC_LD
;
14689 check_insn(ctx
, ISA_MIPS3
);
14690 check_mips_64(ctx
);
14691 mips32_op
= OPC_SD
;
14695 mips32_op
= OPC_SB
;
14698 mips32_op
= OPC_SH
;
14701 mips32_op
= OPC_SW
;
14704 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14707 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14710 generate_exception_end(ctx
, EXCP_RI
);
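/*
 * Illustrative sketch only (hypothetical standalone helper, simplified from
 * the check in decode_micromips_opc() below).  A microMIPS branch records in
 * ctx->hflags whether its delay slot must contain a 16-bit or a 32-bit
 * instruction; the decoder below classifies the next instruction by its major
 * opcode and raises a Reserved Instruction exception on a size mismatch.
 * Here the size class is passed in directly instead of being derived from the
 * opcode bits.
 */
static inline bool mm_delay_slot_size_ok_example(bool insn_is_16bit,
                                                 bool slot_requires_16bit,
                                                 bool slot_requires_32bit)
{
    if (slot_requires_16bit && !insn_is_16bit) {
        return false;  /* decoder would raise EXCP_RI */
    }
    if (slot_requires_32bit && insn_is_16bit) {
        return false;  /* decoder would raise EXCP_RI */
    }
    return true;
}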
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
            int rs2 = mmreg(uMIPS_RS2(ctx->opcode));

            switch (ctx->opcode & 0x1) {
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* In Release 6 the register number location in
                 * the instruction encoding has changed.
                 */
                gen_arith(ctx, opc, rs1, rd, rs2);
                gen_arith(ctx, opc, rd, rs1, rs2);
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs = mmreg(uMIPS_RS(ctx->opcode));
            int amount = (ctx->opcode >> 1) & 0x7;

            amount = amount == 0 ? 8 : amount;
            switch (ctx->opcode & 0x1) {
            gen_shift_imm(ctx, opc, rd, rs, amount);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_pool16c_r6_insn(ctx);
            gen_pool16c_insn(ctx);
14814 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14815 int rb
= 28; /* GP */
14816 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14818 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14822 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14823 if (ctx
->opcode
& 1) {
14824 generate_exception_end(ctx
, EXCP_RI
);
14827 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14828 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14829 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14830 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14835 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14836 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14837 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14838 offset
= (offset
== 0xf ? -1 : offset
);
14840 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14845 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14846 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14847 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14849 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14854 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14855 int rb
= 29; /* SP */
14856 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14858 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14863 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14864 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14865 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14867 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14872 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14873 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14874 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14876 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14881 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14882 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14883 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14885 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14890 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14891 int rb
= 29; /* SP */
14892 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14894 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14899 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14900 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14901 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14903 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14908 int rd
= uMIPS_RD5(ctx
->opcode
);
14909 int rs
= uMIPS_RS5(ctx
->opcode
);
14911 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14918 switch (ctx
->opcode
& 0x1) {
14928 switch (ctx
->opcode
& 0x1) {
14933 gen_addiur1sp(ctx
);
14937 case B16
: /* BC16 */
14938 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14939 sextract32(ctx
->opcode
, 0, 10) << 1,
14940 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14942 case BNEZ16
: /* BNEZC16 */
14943 case BEQZ16
: /* BEQZC16 */
14944 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14945 mmreg(uMIPS_RD(ctx
->opcode
)),
14946 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14947 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14952 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
14953 int imm
= ZIMM(ctx
->opcode
, 0, 7);
14955 imm
= (imm
== 0x7f ? -1 : imm
);
14956 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
14962 generate_exception_end(ctx
, EXCP_RI
);
14965 decode_micromips32_opc(env
, ctx
);
14972 /* SmartMIPS extension to MIPS32 */
14974 #if defined(TARGET_MIPS64)
14976 /* MDMX extension to MIPS64 */
14980 /* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);

        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
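/*
 * Illustrative sketch only (hypothetical standalone helper): the effective
 * address used by the DSP indexed loads emitted in gen_mipsdsp_ld() above is
 * GPR[base] + GPR[offset].  Register $0 always reads as zero, which is why
 * the TCG code above special-cases base == 0 and offset == 0 instead of
 * indexing cpu_gpr[0].
 */
static inline uint64_t mipsdsp_ld_ea_example(const uint64_t *gpr /* gpr[0] == 0 */,
                                             int base, int offset)
{
    uint64_t b = (base == 0) ? 0 : gpr[base];
    uint64_t o = (offset == 0) ? 0 : gpr[offset];

    return b + o;
}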
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
        /* Treat as NOP. */

    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
15043 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15045 case OPC_ADDUH_R_QB
:
15046 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15049 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15051 case OPC_ADDQH_R_PH
:
15052 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15055 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15057 case OPC_ADDQH_R_W
:
15058 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15061 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15063 case OPC_SUBUH_R_QB
:
15064 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15067 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15069 case OPC_SUBQH_R_PH
:
15070 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15073 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15075 case OPC_SUBQH_R_W
:
15076 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15080 case OPC_ABSQ_S_PH_DSP
:
15082 case OPC_ABSQ_S_QB
:
15084 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15086 case OPC_ABSQ_S_PH
:
15088 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15092 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15094 case OPC_PRECEQ_W_PHL
:
15096 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15097 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15099 case OPC_PRECEQ_W_PHR
:
15101 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15102 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15103 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15105 case OPC_PRECEQU_PH_QBL
:
15107 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15109 case OPC_PRECEQU_PH_QBR
:
15111 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15113 case OPC_PRECEQU_PH_QBLA
:
15115 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15117 case OPC_PRECEQU_PH_QBRA
:
15119 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15121 case OPC_PRECEU_PH_QBL
:
15123 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15125 case OPC_PRECEU_PH_QBR
:
15127 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15129 case OPC_PRECEU_PH_QBLA
:
15131 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15133 case OPC_PRECEU_PH_QBRA
:
15135 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15139 case OPC_ADDU_QB_DSP
:
15143 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15145 case OPC_ADDQ_S_PH
:
15147 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15151 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15155 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15157 case OPC_ADDU_S_QB
:
15159 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15163 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15165 case OPC_ADDU_S_PH
:
15167 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15171 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15173 case OPC_SUBQ_S_PH
:
15175 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15179 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15183 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15185 case OPC_SUBU_S_QB
:
15187 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15191 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15193 case OPC_SUBU_S_PH
:
15195 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15199 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15203 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15207 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15209 case OPC_RADDU_W_QB
:
15211 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15215 case OPC_CMPU_EQ_QB_DSP
:
15217 case OPC_PRECR_QB_PH
:
15219 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15221 case OPC_PRECRQ_QB_PH
:
15223 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15225 case OPC_PRECR_SRA_PH_W
:
15228 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15229 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15231 tcg_temp_free_i32(sa_t
);
15234 case OPC_PRECR_SRA_R_PH_W
:
15237 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15238 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15240 tcg_temp_free_i32(sa_t
);
15243 case OPC_PRECRQ_PH_W
:
15245 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15247 case OPC_PRECRQ_RS_PH_W
:
15249 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15251 case OPC_PRECRQU_S_QB_PH
:
15253 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15257 #ifdef TARGET_MIPS64
15258 case OPC_ABSQ_S_QH_DSP
:
15260 case OPC_PRECEQ_L_PWL
:
15262 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15264 case OPC_PRECEQ_L_PWR
:
15266 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15268 case OPC_PRECEQ_PW_QHL
:
15270 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15272 case OPC_PRECEQ_PW_QHR
:
15274 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15276 case OPC_PRECEQ_PW_QHLA
:
15278 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15280 case OPC_PRECEQ_PW_QHRA
:
15282 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15284 case OPC_PRECEQU_QH_OBL
:
15286 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15288 case OPC_PRECEQU_QH_OBR
:
15290 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15292 case OPC_PRECEQU_QH_OBLA
:
15294 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15296 case OPC_PRECEQU_QH_OBRA
:
15298 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15300 case OPC_PRECEU_QH_OBL
:
15302 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15304 case OPC_PRECEU_QH_OBR
:
15306 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15308 case OPC_PRECEU_QH_OBLA
:
15310 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15312 case OPC_PRECEU_QH_OBRA
:
15314 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15316 case OPC_ABSQ_S_OB
:
15318 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15320 case OPC_ABSQ_S_PW
:
15322 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15324 case OPC_ABSQ_S_QH
:
15326 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15330 case OPC_ADDU_OB_DSP
:
15332 case OPC_RADDU_L_OB
:
15334 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15338 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15340 case OPC_SUBQ_S_PW
:
15342 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15346 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15348 case OPC_SUBQ_S_QH
:
15350 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15354 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 case OPC_SUBU_S_OB
:
15358 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15362 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15364 case OPC_SUBU_S_QH
:
15366 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15370 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15372 case OPC_SUBUH_R_OB
:
15374 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15378 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15380 case OPC_ADDQ_S_PW
:
15382 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15386 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15388 case OPC_ADDQ_S_QH
:
15390 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15394 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15396 case OPC_ADDU_S_OB
:
15398 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15402 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15404 case OPC_ADDU_S_QH
:
15406 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15410 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15412 case OPC_ADDUH_R_OB
:
15414 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15418 case OPC_CMPU_EQ_OB_DSP
:
15420 case OPC_PRECR_OB_QH
:
15422 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15424 case OPC_PRECR_SRA_QH_PW
:
15427 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15428 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15429 tcg_temp_free_i32(ret_t
);
15432 case OPC_PRECR_SRA_R_QH_PW
:
15435 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15436 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15437 tcg_temp_free_i32(sa_v
);
15440 case OPC_PRECRQ_OB_QH
:
15442 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15444 case OPC_PRECRQ_PW_L
:
15446 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15448 case OPC_PRECRQ_QH_PW
:
15450 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15452 case OPC_PRECRQ_RS_QH_PW
:
15454 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15456 case OPC_PRECRQU_S_OB_QH
:
15458 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15465 tcg_temp_free(v1_t
);
15466 tcg_temp_free(v2_t
);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
        /* Treat as NOP. */

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15491 case OPC_SHLL_QB_DSP
:
15493 op2
= MASK_SHLL_QB(ctx
->opcode
);
15497 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15501 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15505 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15509 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15511 case OPC_SHLL_S_PH
:
15513 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15515 case OPC_SHLLV_S_PH
:
15517 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15521 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15523 case OPC_SHLLV_S_W
:
15525 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15529 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15533 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15537 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15541 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15545 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15547 case OPC_SHRA_R_QB
:
15549 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15553 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15555 case OPC_SHRAV_R_QB
:
15557 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15561 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15563 case OPC_SHRA_R_PH
:
15565 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15569 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15571 case OPC_SHRAV_R_PH
:
15573 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15577 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15579 case OPC_SHRAV_R_W
:
15581 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15583 default: /* Invalid */
15584 MIPS_INVAL("MASK SHLL.QB");
15585 generate_exception_end(ctx
, EXCP_RI
);
15590 #ifdef TARGET_MIPS64
15591 case OPC_SHLL_OB_DSP
:
15592 op2
= MASK_SHLL_OB(ctx
->opcode
);
15596 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15600 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15602 case OPC_SHLL_S_PW
:
15604 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15606 case OPC_SHLLV_S_PW
:
15608 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15612 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15616 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15620 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15624 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15626 case OPC_SHLL_S_QH
:
15628 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15630 case OPC_SHLLV_S_QH
:
15632 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15636 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15640 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15642 case OPC_SHRA_R_OB
:
15644 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15646 case OPC_SHRAV_R_OB
:
15648 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15652 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15656 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15658 case OPC_SHRA_R_PW
:
15660 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15662 case OPC_SHRAV_R_PW
:
15664 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15668 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15672 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15674 case OPC_SHRA_R_QH
:
15676 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15678 case OPC_SHRAV_R_QH
:
15680 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15684 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15688 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15692 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15696 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15698 default: /* Invalid */
15699 MIPS_INVAL("MASK SHLL.OB");
15700 generate_exception_end(ctx
, EXCP_RI
);
15708 tcg_temp_free(v1_t
);
15709 tcg_temp_free(v2_t
);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
15739 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15742 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15745 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15747 case OPC_MULQ_RS_W
:
15748 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15752 case OPC_DPA_W_PH_DSP
:
15754 case OPC_DPAU_H_QBL
:
15756 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15758 case OPC_DPAU_H_QBR
:
15760 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15762 case OPC_DPSU_H_QBL
:
15764 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15766 case OPC_DPSU_H_QBR
:
15768 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15772 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15774 case OPC_DPAX_W_PH
:
15776 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15778 case OPC_DPAQ_S_W_PH
:
15780 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15782 case OPC_DPAQX_S_W_PH
:
15784 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15786 case OPC_DPAQX_SA_W_PH
:
15788 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15792 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15794 case OPC_DPSX_W_PH
:
15796 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15798 case OPC_DPSQ_S_W_PH
:
15800 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15802 case OPC_DPSQX_S_W_PH
:
15804 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15806 case OPC_DPSQX_SA_W_PH
:
15808 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15810 case OPC_MULSAQ_S_W_PH
:
15812 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15814 case OPC_DPAQ_SA_L_W
:
15816 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15818 case OPC_DPSQ_SA_L_W
:
15820 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15822 case OPC_MAQ_S_W_PHL
:
15824 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15826 case OPC_MAQ_S_W_PHR
:
15828 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15830 case OPC_MAQ_SA_W_PHL
:
15832 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15834 case OPC_MAQ_SA_W_PHR
:
15836 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15838 case OPC_MULSA_W_PH
:
15840 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15844 #ifdef TARGET_MIPS64
15845 case OPC_DPAQ_W_QH_DSP
:
15847 int ac
= ret
& 0x03;
15848 tcg_gen_movi_i32(t0
, ac
);
15853 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15857 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15861 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15865 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15869 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15871 case OPC_DPAQ_S_W_QH
:
15873 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15875 case OPC_DPAQ_SA_L_PW
:
15877 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15879 case OPC_DPAU_H_OBL
:
15881 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15883 case OPC_DPAU_H_OBR
:
15885 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15889 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15891 case OPC_DPSQ_S_W_QH
:
15893 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15895 case OPC_DPSQ_SA_L_PW
:
15897 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15899 case OPC_DPSU_H_OBL
:
15901 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15903 case OPC_DPSU_H_OBR
:
15905 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15907 case OPC_MAQ_S_L_PWL
:
15909 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15911 case OPC_MAQ_S_L_PWR
:
15913 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15915 case OPC_MAQ_S_W_QHLL
:
15917 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15919 case OPC_MAQ_SA_W_QHLL
:
15921 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15923 case OPC_MAQ_S_W_QHLR
:
15925 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15927 case OPC_MAQ_SA_W_QHLR
:
15929 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15931 case OPC_MAQ_S_W_QHRL
:
15933 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15935 case OPC_MAQ_SA_W_QHRL
:
15937 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15939 case OPC_MAQ_S_W_QHRR
:
15941 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15943 case OPC_MAQ_SA_W_QHRR
:
15945 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15947 case OPC_MULSAQ_S_L_PW
:
15949 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15951 case OPC_MULSAQ_S_W_QH
:
15953 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15959 case OPC_ADDU_QB_DSP
:
15961 case OPC_MULEU_S_PH_QBL
:
15963 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15965 case OPC_MULEU_S_PH_QBR
:
15967 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15969 case OPC_MULQ_RS_PH
:
15971 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15973 case OPC_MULEQ_S_W_PHL
:
15975 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15977 case OPC_MULEQ_S_W_PHR
:
15979 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15981 case OPC_MULQ_S_PH
:
15983 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15987 #ifdef TARGET_MIPS64
15988 case OPC_ADDU_OB_DSP
:
15990 case OPC_MULEQ_S_PW_QHL
:
15992 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15994 case OPC_MULEQ_S_PW_QHR
:
15996 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15998 case OPC_MULEU_S_QH_OBL
:
16000 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16002 case OPC_MULEU_S_QH_OBR
:
16004 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16006 case OPC_MULQ_RS_QH
:
16008 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16015 tcg_temp_free_i32(t0
);
16016 tcg_temp_free(v1_t
);
16017 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
        /* Treat as NOP. */

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);

    case OPC_ABSQ_S_PH_DSP:
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            target_long result;
            imm = (ctx->opcode >> 16) & 0xFF;
            result = (uint32_t)imm << 24 |
                     (uint32_t)imm << 16 |
                     (uint32_t)imm << 8 |
                     (uint32_t)imm;
            result = (int32_t)result;
            tcg_gen_movi_tl(cpu_gpr[ret], result);
16058 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16059 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16060 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16061 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16062 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16063 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16068 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16069 imm
= (int16_t)(imm
<< 6) >> 6;
16070 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16071 (target_long
)((int32_t)imm
<< 16 | \
16077 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16078 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16079 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16080 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16084 #ifdef TARGET_MIPS64
16085 case OPC_ABSQ_S_QH_DSP
:
16092 imm
= (ctx
->opcode
>> 16) & 0xFF;
16093 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16094 temp
= (temp
<< 16) | temp
;
16095 temp
= (temp
<< 32) | temp
;
16096 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16104 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16105 imm
= (int16_t)(imm
<< 6) >> 6;
16106 temp
= ((target_long
)imm
<< 32) \
16107 | ((target_long
)imm
& 0xFFFFFFFF);
16108 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16116 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16117 imm
= (int16_t)(imm
<< 6) >> 6;
16119 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16120 ((uint64_t)(uint16_t)imm
<< 32) |
16121 ((uint64_t)(uint16_t)imm
<< 16) |
16122 (uint64_t)(uint16_t)imm
;
16123 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16128 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16129 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16130 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16131 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16132 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16133 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16134 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16138 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16139 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16140 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16144 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16145 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16146 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16147 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16148 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16155 tcg_temp_free(val_t
);
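/*
 * Illustrative sketch only (hypothetical standalone helpers): the immediate
 * forms of the DSP "replicate" operations handled in gen_mipsdsp_bitinsn()
 * above compute their result entirely at translate time.  REPL.QB copies an
 * 8-bit immediate into all four byte lanes; REPL.PH sign-extends a 10-bit
 * immediate and copies it into both halfword lanes.
 */
static inline int32_t repl_qb_imm_example(uint32_t imm8)
{
    uint32_t b = imm8 & 0xff;

    return (int32_t)(b << 24 | b << 16 | b << 8 | b);
}

static inline int32_t repl_ph_imm_example(uint32_t imm10)
{
    int32_t h = (int32_t)(imm10 & 0x3ff);

    h = (h ^ 0x200) - 0x200;                 /* sign-extend 10 bits */
    return (int32_t)(((uint32_t)h << 16) | ((uint32_t)h & 0xffff));
}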
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16179 case OPC_CMPU_EQ_QB_DSP
:
16181 case OPC_CMPU_EQ_QB
:
16183 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16185 case OPC_CMPU_LT_QB
:
16187 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16189 case OPC_CMPU_LE_QB
:
16191 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16193 case OPC_CMPGU_EQ_QB
:
16195 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16197 case OPC_CMPGU_LT_QB
:
16199 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16201 case OPC_CMPGU_LE_QB
:
16203 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16205 case OPC_CMPGDU_EQ_QB
:
16207 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16208 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16209 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16210 tcg_gen_shli_tl(t1
, t1
, 24);
16211 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16213 case OPC_CMPGDU_LT_QB
:
16215 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16216 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16217 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16218 tcg_gen_shli_tl(t1
, t1
, 24);
16219 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16221 case OPC_CMPGDU_LE_QB
:
16223 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16224 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16225 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16226 tcg_gen_shli_tl(t1
, t1
, 24);
16227 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16229 case OPC_CMP_EQ_PH
:
16231 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16233 case OPC_CMP_LT_PH
:
16235 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16237 case OPC_CMP_LE_PH
:
16239 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16243 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16247 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16249 case OPC_PACKRL_PH
:
16251 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16255 #ifdef TARGET_MIPS64
16256 case OPC_CMPU_EQ_OB_DSP
:
16258 case OPC_CMP_EQ_PW
:
16260 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16262 case OPC_CMP_LT_PW
:
16264 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16266 case OPC_CMP_LE_PW
:
16268 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16270 case OPC_CMP_EQ_QH
:
16272 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16274 case OPC_CMP_LT_QH
:
16276 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16278 case OPC_CMP_LE_QH
:
16280 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16282 case OPC_CMPGDU_EQ_OB
:
16284 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16286 case OPC_CMPGDU_LT_OB
:
16288 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16290 case OPC_CMPGDU_LE_OB
:
16292 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16294 case OPC_CMPGU_EQ_OB
:
16296 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16298 case OPC_CMPGU_LT_OB
:
16300 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16302 case OPC_CMPGU_LE_OB
:
16304 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16306 case OPC_CMPU_EQ_OB
:
16308 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16310 case OPC_CMPU_LT_OB
:
16312 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16314 case OPC_CMPU_LE_OB
:
16316 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16318 case OPC_PACKRL_PW
:
16320 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16324 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16328 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16332 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16340 tcg_temp_free(v1_t
);
16341 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
        /* Treat as NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
16393 #ifdef TARGET_MIPS64
16394 case OPC_DAPPEND_DSP
:
16395 switch (MASK_DAPPEND(ctx
->opcode
)) {
16398 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16402 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16403 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16404 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16408 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16409 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16410 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16415 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16416 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16417 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16418 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16421 default: /* Invalid */
16422 MIPS_INVAL("MASK DAPPEND");
16423 generate_exception_end(ctx
, EXCP_RI
);
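/*
 * Illustrative sketch only (hypothetical standalone helpers) of the 32-bit
 * DSP-R2 bit-stitching operations emitted above with TCG deposit/shift/or:
 * APPEND shifts rt left by sa and fills the vacated low bits from rs, while
 * PREPEND shifts rt right by sa and fills the vacated high bits from rs.
 * Results are sign-extended to the target register width, as above.
 */
static inline int32_t dsp_append_example(uint32_t rt, uint32_t rs, unsigned sa)
{
    uint32_t low_mask = (sa == 0) ? 0 : ((1u << sa) - 1);

    return (int32_t)((rt << sa) | (rs & low_mask));
}

static inline int32_t dsp_prepend_example(uint32_t rt, uint32_t rs, unsigned sa)
{
    if (sa == 0) {
        return (int32_t)rt;
    }
    return (int32_t)((rt >> sa) | (rs << (32 - sa)));
}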
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16456 case OPC_EXTR_W_DSP
:
16460 tcg_gen_movi_tl(t0
, v2
);
16461 tcg_gen_movi_tl(t1
, v1
);
16462 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16465 tcg_gen_movi_tl(t0
, v2
);
16466 tcg_gen_movi_tl(t1
, v1
);
16467 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16469 case OPC_EXTR_RS_W
:
16470 tcg_gen_movi_tl(t0
, v2
);
16471 tcg_gen_movi_tl(t1
, v1
);
16472 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16475 tcg_gen_movi_tl(t0
, v2
);
16476 tcg_gen_movi_tl(t1
, v1
);
16477 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16479 case OPC_EXTRV_S_H
:
16480 tcg_gen_movi_tl(t0
, v2
);
16481 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16484 tcg_gen_movi_tl(t0
, v2
);
16485 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16487 case OPC_EXTRV_R_W
:
16488 tcg_gen_movi_tl(t0
, v2
);
16489 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16491 case OPC_EXTRV_RS_W
:
16492 tcg_gen_movi_tl(t0
, v2
);
16493 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16496 tcg_gen_movi_tl(t0
, v2
);
16497 tcg_gen_movi_tl(t1
, v1
);
16498 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16501 tcg_gen_movi_tl(t0
, v2
);
16502 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16505 tcg_gen_movi_tl(t0
, v2
);
16506 tcg_gen_movi_tl(t1
, v1
);
16507 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16510 tcg_gen_movi_tl(t0
, v2
);
16511 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16514 imm
= (ctx
->opcode
>> 20) & 0x3F;
16515 tcg_gen_movi_tl(t0
, ret
);
16516 tcg_gen_movi_tl(t1
, imm
);
16517 gen_helper_shilo(t0
, t1
, cpu_env
);
16520 tcg_gen_movi_tl(t0
, ret
);
16521 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16524 tcg_gen_movi_tl(t0
, ret
);
16525 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16528 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16529 tcg_gen_movi_tl(t0
, imm
);
16530 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16533 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16534 tcg_gen_movi_tl(t0
, imm
);
16535 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16539 #ifdef TARGET_MIPS64
16540 case OPC_DEXTR_W_DSP
:
16544 tcg_gen_movi_tl(t0
, ret
);
16545 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16549 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16550 int ac
= (ctx
->opcode
>> 11) & 0x03;
16551 tcg_gen_movi_tl(t0
, shift
);
16552 tcg_gen_movi_tl(t1
, ac
);
16553 gen_helper_dshilo(t0
, t1
, cpu_env
);
16558 int ac
= (ctx
->opcode
>> 11) & 0x03;
16559 tcg_gen_movi_tl(t0
, ac
);
16560 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16564 tcg_gen_movi_tl(t0
, v2
);
16565 tcg_gen_movi_tl(t1
, v1
);
16567 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16570 tcg_gen_movi_tl(t0
, v2
);
16571 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16574 tcg_gen_movi_tl(t0
, v2
);
16575 tcg_gen_movi_tl(t1
, v1
);
16576 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16579 tcg_gen_movi_tl(t0
, v2
);
16580 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16583 tcg_gen_movi_tl(t0
, v2
);
16584 tcg_gen_movi_tl(t1
, v1
);
16585 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16587 case OPC_DEXTR_R_L
:
16588 tcg_gen_movi_tl(t0
, v2
);
16589 tcg_gen_movi_tl(t1
, v1
);
16590 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16592 case OPC_DEXTR_RS_L
:
16593 tcg_gen_movi_tl(t0
, v2
);
16594 tcg_gen_movi_tl(t1
, v1
);
16595 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16598 tcg_gen_movi_tl(t0
, v2
);
16599 tcg_gen_movi_tl(t1
, v1
);
16600 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16602 case OPC_DEXTR_R_W
:
16603 tcg_gen_movi_tl(t0
, v2
);
16604 tcg_gen_movi_tl(t1
, v1
);
16605 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16607 case OPC_DEXTR_RS_W
:
16608 tcg_gen_movi_tl(t0
, v2
);
16609 tcg_gen_movi_tl(t1
, v1
);
16610 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16612 case OPC_DEXTR_S_H
:
16613 tcg_gen_movi_tl(t0
, v2
);
16614 tcg_gen_movi_tl(t1
, v1
);
16615 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16617 case OPC_DEXTRV_S_H
:
16618 tcg_gen_movi_tl(t0
, v2
);
16619 tcg_gen_movi_tl(t1
, v1
);
16620 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16623 tcg_gen_movi_tl(t0
, v2
);
16624 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16626 case OPC_DEXTRV_R_L
:
16627 tcg_gen_movi_tl(t0
, v2
);
16628 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16630 case OPC_DEXTRV_RS_L
:
16631 tcg_gen_movi_tl(t0
, v2
);
16632 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16635 tcg_gen_movi_tl(t0
, v2
);
16636 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16638 case OPC_DEXTRV_R_W
:
16639 tcg_gen_movi_tl(t0
, v2
);
16640 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16642 case OPC_DEXTRV_RS_W
:
16643 tcg_gen_movi_tl(t0
, v2
);
16644 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16653 tcg_temp_free(v1_t
);
16654 tcg_temp_free(v2_t
);
/* End MIPSDSP functions. */
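
/*
 * Decoding of the SPECIAL and SPECIAL3 major opcodes is layered below:
 * decode_opc_special() and decode_opc_special3() handle the function
 * fields that are common to all ISA levels and then defer either to the
 * Release 6 decoders (decode_opc_special_r6(), decode_opc_special3_r6())
 * or to the legacy decoders, depending on ctx->insn_flags.  SPECIAL2 has
 * a legacy decoder only, since the whole opcode space was removed in
 * Release 6 (see check_insn_opc_removed() there).
 */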
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_MUL:
        case R6_OPC_MUH:
        case R6_OPC_MULU:
        case R6_OPC_MUHU:
        case R6_OPC_DIV:
        case R6_OPC_MOD:
        case R6_OPC_DIVU:
        case R6_OPC_MODU:
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SELEQZ:
    case OPC_SELNEZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case R6_OPC_CLO:
    case R6_OPC_CLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case R6_OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                generate_exception_end(ctx, EXCP_DBp);
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DLSA:
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case R6_OPC_DCLO:
    case R6_OPC_DCLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_DMUL:
        case R6_OPC_DMUH:
        case R6_OPC_DMULU:
        case R6_OPC_DMUHU:
        case R6_OPC_DDIV:
        case R6_OPC_DMOD:
        case R6_OPC_DDIVU:
        case R6_OPC_DMODU:
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, rs & 3, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        break;
    case OPC_MOVCI:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_MULT:
    case OPC_MULTU:
        if (sa) {
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        } else {
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        }
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_SPIM:
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
#else
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
#endif
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_SLL:          /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        /* Fallthrough */
    case OPC_SRA:
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_SRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTR;
            }
            /* Fallthrough */
        case 0:
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ADD ... OPC_SUBU:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_SLLV:         /* Shifts */
    case OPC_SRAV:
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_SRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTRV;
            }
            /* Fallthrough */
        case 0:
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SLT:          /* Set on less than */
    case OPC_SLTU:
        gen_slt(ctx, op1, rd, rs, rt);
        break;
    case OPC_AND:          /* Logic*/
    case OPC_OR:
    case OPC_NOR:
    case OPC_XOR:
        gen_logic(ctx, op1, rd, rs, rt);
        break;
    case OPC_JALR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_TGE ... OPC_TEQ: /* Traps */
    case OPC_TNE:
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
        break;
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        } else {
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
#else
            gen_helper_0e0i(pmon, sa);
#endif
        }
        break;
    case OPC_SYSCALL:
        generate_exception_end(ctx, EXCP_SYSCALL);
        break;
    case OPC_BREAK:
        generate_exception_end(ctx, EXCP_BREAK);
        break;
    case OPC_SYNC:
        check_insn(ctx, ISA_MIPS2);
        /* Treat as NOP. */
        break;

#if defined(TARGET_MIPS64)
        /* MIPS64 specific opcodes */
    case OPC_DSLL:
    case OPC_DSRA:
    case OPC_DSLL32:
    case OPC_DSRA32:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_DSRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DSRL32:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR32;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DADD ... OPC_DSUBU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSLLV:
    case OPC_DSRAV:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTRV;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DLSA:
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        }
        break;
#endif
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
        } else {
            decode_opc_special_legacy(env, ctx);
        }
    }
}

static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case R6_OPC_PREF:
        if (rt >= 24) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        }
        /* Treat as NOP. */
        break;
    case R6_OPC_CACHE:
        /* Treat as NOP. */
        break;
    case R6_OPC_SC:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LL:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_BSHFL:
        if (rd == 0) {
            /* Treat as NOP. */
            break;
        }
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN ... OPC_ALIGN_END:
            gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
            break;
        case OPC_BITSWAP:
            gen_bitswap(ctx, op2, rd, rt);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case R6_OPC_SCD:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LLD:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_DBSHFL:
        check_mips_64(ctx);
        if (rd == 0) {
            /* Treat as NOP. */
            break;
        }
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN ... OPC_DALIGN_END:
            gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
            break;
        case OPC_DBITSWAP:
            gen_bitswap(ctx, op2, rd, rt);
            break;
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

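/*
 * Note: OPC_ALIGN ... OPC_ALIGN_END and OPC_DALIGN ... OPC_DALIGN_END are
 * handled as case ranges above because the ALIGN/DALIGN byte position is
 * encoded in the low bits of the function field; gen_align() receives it
 * as sa & 3 (ALIGN) or sa & 7 (DALIGN).
 */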
17188 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17193 rs
= (ctx
->opcode
>> 21) & 0x1f;
17194 rt
= (ctx
->opcode
>> 16) & 0x1f;
17195 rd
= (ctx
->opcode
>> 11) & 0x1f;
17197 op1
= MASK_SPECIAL3(ctx
->opcode
);
17199 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17200 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17201 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17202 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17203 * the same mask and op1. */
17204 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17205 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17208 case OPC_ADDUH_R_QB
:
17210 case OPC_ADDQH_R_PH
:
17212 case OPC_ADDQH_R_W
:
17214 case OPC_SUBUH_R_QB
:
17216 case OPC_SUBQH_R_PH
:
17218 case OPC_SUBQH_R_W
:
17219 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17224 case OPC_MULQ_RS_W
:
17225 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17228 MIPS_INVAL("MASK ADDUH.QB");
17229 generate_exception_end(ctx
, EXCP_RI
);
17232 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17233 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17235 generate_exception_end(ctx
, EXCP_RI
);
17239 op2
= MASK_LX(ctx
->opcode
);
17241 #if defined(TARGET_MIPS64)
17247 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17249 default: /* Invalid */
17250 MIPS_INVAL("MASK LX");
17251 generate_exception_end(ctx
, EXCP_RI
);
17255 case OPC_ABSQ_S_PH_DSP
:
17256 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17258 case OPC_ABSQ_S_QB
:
17259 case OPC_ABSQ_S_PH
:
17261 case OPC_PRECEQ_W_PHL
:
17262 case OPC_PRECEQ_W_PHR
:
17263 case OPC_PRECEQU_PH_QBL
:
17264 case OPC_PRECEQU_PH_QBR
:
17265 case OPC_PRECEQU_PH_QBLA
:
17266 case OPC_PRECEQU_PH_QBRA
:
17267 case OPC_PRECEU_PH_QBL
:
17268 case OPC_PRECEU_PH_QBR
:
17269 case OPC_PRECEU_PH_QBLA
:
17270 case OPC_PRECEU_PH_QBRA
:
17271 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17278 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17281 MIPS_INVAL("MASK ABSQ_S.PH");
17282 generate_exception_end(ctx
, EXCP_RI
);
17286 case OPC_ADDU_QB_DSP
:
17287 op2
= MASK_ADDU_QB(ctx
->opcode
);
17290 case OPC_ADDQ_S_PH
:
17293 case OPC_ADDU_S_QB
:
17295 case OPC_ADDU_S_PH
:
17297 case OPC_SUBQ_S_PH
:
17300 case OPC_SUBU_S_QB
:
17302 case OPC_SUBU_S_PH
:
17306 case OPC_RADDU_W_QB
:
17307 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17309 case OPC_MULEU_S_PH_QBL
:
17310 case OPC_MULEU_S_PH_QBR
:
17311 case OPC_MULQ_RS_PH
:
17312 case OPC_MULEQ_S_W_PHL
:
17313 case OPC_MULEQ_S_W_PHR
:
17314 case OPC_MULQ_S_PH
:
17315 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17317 default: /* Invalid */
17318 MIPS_INVAL("MASK ADDU.QB");
17319 generate_exception_end(ctx
, EXCP_RI
);
17324 case OPC_CMPU_EQ_QB_DSP
:
17325 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17327 case OPC_PRECR_SRA_PH_W
:
17328 case OPC_PRECR_SRA_R_PH_W
:
17329 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17331 case OPC_PRECR_QB_PH
:
17332 case OPC_PRECRQ_QB_PH
:
17333 case OPC_PRECRQ_PH_W
:
17334 case OPC_PRECRQ_RS_PH_W
:
17335 case OPC_PRECRQU_S_QB_PH
:
17336 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17338 case OPC_CMPU_EQ_QB
:
17339 case OPC_CMPU_LT_QB
:
17340 case OPC_CMPU_LE_QB
:
17341 case OPC_CMP_EQ_PH
:
17342 case OPC_CMP_LT_PH
:
17343 case OPC_CMP_LE_PH
:
17344 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17346 case OPC_CMPGU_EQ_QB
:
17347 case OPC_CMPGU_LT_QB
:
17348 case OPC_CMPGU_LE_QB
:
17349 case OPC_CMPGDU_EQ_QB
:
17350 case OPC_CMPGDU_LT_QB
:
17351 case OPC_CMPGDU_LE_QB
:
17354 case OPC_PACKRL_PH
:
17355 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17357 default: /* Invalid */
17358 MIPS_INVAL("MASK CMPU.EQ.QB");
17359 generate_exception_end(ctx
, EXCP_RI
);
17363 case OPC_SHLL_QB_DSP
:
17364 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17366 case OPC_DPA_W_PH_DSP
:
17367 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17369 case OPC_DPAU_H_QBL
:
17370 case OPC_DPAU_H_QBR
:
17371 case OPC_DPSU_H_QBL
:
17372 case OPC_DPSU_H_QBR
:
17374 case OPC_DPAX_W_PH
:
17375 case OPC_DPAQ_S_W_PH
:
17376 case OPC_DPAQX_S_W_PH
:
17377 case OPC_DPAQX_SA_W_PH
:
17379 case OPC_DPSX_W_PH
:
17380 case OPC_DPSQ_S_W_PH
:
17381 case OPC_DPSQX_S_W_PH
:
17382 case OPC_DPSQX_SA_W_PH
:
17383 case OPC_MULSAQ_S_W_PH
:
17384 case OPC_DPAQ_SA_L_W
:
17385 case OPC_DPSQ_SA_L_W
:
17386 case OPC_MAQ_S_W_PHL
:
17387 case OPC_MAQ_S_W_PHR
:
17388 case OPC_MAQ_SA_W_PHL
:
17389 case OPC_MAQ_SA_W_PHR
:
17390 case OPC_MULSA_W_PH
:
17391 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17393 default: /* Invalid */
17394 MIPS_INVAL("MASK DPAW.PH");
17395 generate_exception_end(ctx
, EXCP_RI
);
17400 op2
= MASK_INSV(ctx
->opcode
);
17411 t0
= tcg_temp_new();
17412 t1
= tcg_temp_new();
17414 gen_load_gpr(t0
, rt
);
17415 gen_load_gpr(t1
, rs
);
17417 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17423 default: /* Invalid */
17424 MIPS_INVAL("MASK INSV");
17425 generate_exception_end(ctx
, EXCP_RI
);
17429 case OPC_APPEND_DSP
:
17430 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17432 case OPC_EXTR_W_DSP
:
17433 op2
= MASK_EXTR_W(ctx
->opcode
);
17437 case OPC_EXTR_RS_W
:
17439 case OPC_EXTRV_S_H
:
17441 case OPC_EXTRV_R_W
:
17442 case OPC_EXTRV_RS_W
:
17447 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17450 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17456 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17458 default: /* Invalid */
17459 MIPS_INVAL("MASK EXTR.W");
17460 generate_exception_end(ctx
, EXCP_RI
);
17464 #if defined(TARGET_MIPS64)
17465 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17466 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17467 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17468 check_insn(ctx
, INSN_LOONGSON2E
);
17469 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17471 case OPC_ABSQ_S_QH_DSP
:
17472 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17474 case OPC_PRECEQ_L_PWL
:
17475 case OPC_PRECEQ_L_PWR
:
17476 case OPC_PRECEQ_PW_QHL
:
17477 case OPC_PRECEQ_PW_QHR
:
17478 case OPC_PRECEQ_PW_QHLA
:
17479 case OPC_PRECEQ_PW_QHRA
:
17480 case OPC_PRECEQU_QH_OBL
:
17481 case OPC_PRECEQU_QH_OBR
:
17482 case OPC_PRECEQU_QH_OBLA
:
17483 case OPC_PRECEQU_QH_OBRA
:
17484 case OPC_PRECEU_QH_OBL
:
17485 case OPC_PRECEU_QH_OBR
:
17486 case OPC_PRECEU_QH_OBLA
:
17487 case OPC_PRECEU_QH_OBRA
:
17488 case OPC_ABSQ_S_OB
:
17489 case OPC_ABSQ_S_PW
:
17490 case OPC_ABSQ_S_QH
:
17491 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17499 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17501 default: /* Invalid */
17502 MIPS_INVAL("MASK ABSQ_S.QH");
17503 generate_exception_end(ctx
, EXCP_RI
);
17507 case OPC_ADDU_OB_DSP
:
17508 op2
= MASK_ADDU_OB(ctx
->opcode
);
17510 case OPC_RADDU_L_OB
:
17512 case OPC_SUBQ_S_PW
:
17514 case OPC_SUBQ_S_QH
:
17516 case OPC_SUBU_S_OB
:
17518 case OPC_SUBU_S_QH
:
17520 case OPC_SUBUH_R_OB
:
17522 case OPC_ADDQ_S_PW
:
17524 case OPC_ADDQ_S_QH
:
17526 case OPC_ADDU_S_OB
:
17528 case OPC_ADDU_S_QH
:
17530 case OPC_ADDUH_R_OB
:
17531 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17533 case OPC_MULEQ_S_PW_QHL
:
17534 case OPC_MULEQ_S_PW_QHR
:
17535 case OPC_MULEU_S_QH_OBL
:
17536 case OPC_MULEU_S_QH_OBR
:
17537 case OPC_MULQ_RS_QH
:
17538 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17540 default: /* Invalid */
17541 MIPS_INVAL("MASK ADDU.OB");
17542 generate_exception_end(ctx
, EXCP_RI
);
17546 case OPC_CMPU_EQ_OB_DSP
:
17547 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17549 case OPC_PRECR_SRA_QH_PW
:
17550 case OPC_PRECR_SRA_R_QH_PW
:
17551 /* Return value is rt. */
17552 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17554 case OPC_PRECR_OB_QH
:
17555 case OPC_PRECRQ_OB_QH
:
17556 case OPC_PRECRQ_PW_L
:
17557 case OPC_PRECRQ_QH_PW
:
17558 case OPC_PRECRQ_RS_QH_PW
:
17559 case OPC_PRECRQU_S_OB_QH
:
17560 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17562 case OPC_CMPU_EQ_OB
:
17563 case OPC_CMPU_LT_OB
:
17564 case OPC_CMPU_LE_OB
:
17565 case OPC_CMP_EQ_QH
:
17566 case OPC_CMP_LT_QH
:
17567 case OPC_CMP_LE_QH
:
17568 case OPC_CMP_EQ_PW
:
17569 case OPC_CMP_LT_PW
:
17570 case OPC_CMP_LE_PW
:
17571 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17573 case OPC_CMPGDU_EQ_OB
:
17574 case OPC_CMPGDU_LT_OB
:
17575 case OPC_CMPGDU_LE_OB
:
17576 case OPC_CMPGU_EQ_OB
:
17577 case OPC_CMPGU_LT_OB
:
17578 case OPC_CMPGU_LE_OB
:
17579 case OPC_PACKRL_PW
:
17583 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17585 default: /* Invalid */
17586 MIPS_INVAL("MASK CMPU_EQ.OB");
17587 generate_exception_end(ctx
, EXCP_RI
);
17591 case OPC_DAPPEND_DSP
:
17592 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17594 case OPC_DEXTR_W_DSP
:
17595 op2
= MASK_DEXTR_W(ctx
->opcode
);
17602 case OPC_DEXTR_R_L
:
17603 case OPC_DEXTR_RS_L
:
17605 case OPC_DEXTR_R_W
:
17606 case OPC_DEXTR_RS_W
:
17607 case OPC_DEXTR_S_H
:
17609 case OPC_DEXTRV_R_L
:
17610 case OPC_DEXTRV_RS_L
:
17611 case OPC_DEXTRV_S_H
:
17613 case OPC_DEXTRV_R_W
:
17614 case OPC_DEXTRV_RS_W
:
17615 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17620 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17622 default: /* Invalid */
17623 MIPS_INVAL("MASK EXTR.W");
17624 generate_exception_end(ctx
, EXCP_RI
);
17628 case OPC_DPAQ_W_QH_DSP
:
17629 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17631 case OPC_DPAU_H_OBL
:
17632 case OPC_DPAU_H_OBR
:
17633 case OPC_DPSU_H_OBL
:
17634 case OPC_DPSU_H_OBR
:
17636 case OPC_DPAQ_S_W_QH
:
17638 case OPC_DPSQ_S_W_QH
:
17639 case OPC_MULSAQ_S_W_QH
:
17640 case OPC_DPAQ_SA_L_PW
:
17641 case OPC_DPSQ_SA_L_PW
:
17642 case OPC_MULSAQ_S_L_PW
:
17643 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17645 case OPC_MAQ_S_W_QHLL
:
17646 case OPC_MAQ_S_W_QHLR
:
17647 case OPC_MAQ_S_W_QHRL
:
17648 case OPC_MAQ_S_W_QHRR
:
17649 case OPC_MAQ_SA_W_QHLL
:
17650 case OPC_MAQ_SA_W_QHLR
:
17651 case OPC_MAQ_SA_W_QHRL
:
17652 case OPC_MAQ_SA_W_QHRR
:
17653 case OPC_MAQ_S_L_PWL
:
17654 case OPC_MAQ_S_L_PWR
:
17659 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17661 default: /* Invalid */
17662 MIPS_INVAL("MASK DPAQ.W.QH");
17663 generate_exception_end(ctx
, EXCP_RI
);
17667 case OPC_DINSV_DSP
:
17668 op2
= MASK_INSV(ctx
->opcode
);
17679 t0
= tcg_temp_new();
17680 t1
= tcg_temp_new();
17682 gen_load_gpr(t0
, rt
);
17683 gen_load_gpr(t1
, rs
);
17685 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17691 default: /* Invalid */
17692 MIPS_INVAL("MASK DINSV");
17693 generate_exception_end(ctx
, EXCP_RI
);
17697 case OPC_SHLL_OB_DSP
:
17698 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17701 default: /* Invalid */
17702 MIPS_INVAL("special3_legacy");
17703 generate_exception_end(ctx
, EXCP_RI
);
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case OPC_EXT:
    case OPC_INS:
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_BSHFL:
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN ... OPC_ALIGN_END:
        case OPC_BITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_DBSHFL:
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN ... OPC_DALIGN_END:
        case OPC_DBITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#endif
    case OPC_RDHWR:
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        break;
    case OPC_FORK:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;
    case OPC_YIELD:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, cpu_env, t0);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
        } else {
            decode_opc_special3_legacy(env, ctx);
        }
    }
}

/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}

static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
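
/*
 * gen_check_zero_element() relies on the classic SWAR zero-detect idiom:
 * for a value x of the element width, (x - 1) & ~x has the element's sign
 * bit set only when x was zero, so masking with eval_big and OR-ing the
 * two 64-bit halves of the 128-bit register yields a nonzero value iff at
 * least one element of wt is zero.  For a byte lane, for instance:
 *   x = 0x00: (0xff & 0xff) & 0x80 = 0x80   (zero element detected)
 *   x = 0x01: (0x00 & 0xfe) & 0x80 = 0x00
 */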

static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}

static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_I8(op)    (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
    case OPC_ANDI_B:
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_ORI_B:
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_NORI_B:
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_XORI_B:
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMNZI_B:
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMZI_B:
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BSELI_B:
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_SHF_B:
    case OPC_SHF_H:
    case OPC_SHF_W:
        {
            uint8_t df = (ctx->opcode >> 24) & 0x3;
            if (df == DF_DOUBLE) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                TCGv_i32 tdf = tcg_const_i32(df);
                gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
                tcg_temp_free_i32(tdf);
            }
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
}
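
/*
 * MSA I8 format (gen_msa_i8() above): the 8-bit immediate is taken from
 * bits 23:16 of the instruction, ws from bits 15:11 and wd from bits 10:6.
 * MASK_MSA_I8() folds bits 25:24 into the minor opcode to tell the
 * ANDI.B/ORI.B/... operations and SHF.df apart; SHF additionally reads its
 * data format from those bits and is not defined for the doubleword format.
 */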
17964 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17966 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17967 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17968 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17969 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17970 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17971 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17973 TCGv_i32 tdf
= tcg_const_i32(df
);
17974 TCGv_i32 twd
= tcg_const_i32(wd
);
17975 TCGv_i32 tws
= tcg_const_i32(ws
);
17976 TCGv_i32 timm
= tcg_temp_new_i32();
17977 tcg_gen_movi_i32(timm
, u5
);
17979 switch (MASK_MSA_I5(ctx
->opcode
)) {
17981 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17984 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17986 case OPC_MAXI_S_df
:
17987 tcg_gen_movi_i32(timm
, s5
);
17988 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17990 case OPC_MAXI_U_df
:
17991 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17993 case OPC_MINI_S_df
:
17994 tcg_gen_movi_i32(timm
, s5
);
17995 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17997 case OPC_MINI_U_df
:
17998 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18001 tcg_gen_movi_i32(timm
, s5
);
18002 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18004 case OPC_CLTI_S_df
:
18005 tcg_gen_movi_i32(timm
, s5
);
18006 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18008 case OPC_CLTI_U_df
:
18009 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18011 case OPC_CLEI_S_df
:
18012 tcg_gen_movi_i32(timm
, s5
);
18013 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18015 case OPC_CLEI_U_df
:
18016 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18020 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18021 tcg_gen_movi_i32(timm
, s10
);
18022 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18026 MIPS_INVAL("MSA instruction");
18027 generate_exception_end(ctx
, EXCP_RI
);
18031 tcg_temp_free_i32(tdf
);
18032 tcg_temp_free_i32(twd
);
18033 tcg_temp_free_i32(tws
);
18034 tcg_temp_free_i32(timm
);
18037 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18039 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18040 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18041 uint32_t df
= 0, m
= 0;
18042 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18043 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18050 if ((dfm
& 0x40) == 0x00) {
18053 } else if ((dfm
& 0x60) == 0x40) {
18056 } else if ((dfm
& 0x70) == 0x60) {
18059 } else if ((dfm
& 0x78) == 0x70) {
18063 generate_exception_end(ctx
, EXCP_RI
);
18067 tdf
= tcg_const_i32(df
);
18068 tm
= tcg_const_i32(m
);
18069 twd
= tcg_const_i32(wd
);
18070 tws
= tcg_const_i32(ws
);
18072 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18074 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18077 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18080 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18083 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18086 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18089 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18091 case OPC_BINSLI_df
:
18092 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18094 case OPC_BINSRI_df
:
18095 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18098 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18101 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18104 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18107 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18110 MIPS_INVAL("MSA instruction");
18111 generate_exception_end(ctx
, EXCP_RI
);
18115 tcg_temp_free_i32(tdf
);
18116 tcg_temp_free_i32(tm
);
18117 tcg_temp_free_i32(twd
);
18118 tcg_temp_free_i32(tws
);
18121 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18123 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18124 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18125 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18126 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18127 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18129 TCGv_i32 tdf
= tcg_const_i32(df
);
18130 TCGv_i32 twd
= tcg_const_i32(wd
);
18131 TCGv_i32 tws
= tcg_const_i32(ws
);
18132 TCGv_i32 twt
= tcg_const_i32(wt
);
18134 switch (MASK_MSA_3R(ctx
->opcode
)) {
18136 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18139 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18142 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18145 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18147 case OPC_SUBS_S_df
:
18148 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18151 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18154 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18157 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18160 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18163 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18165 case OPC_ADDS_A_df
:
18166 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18168 case OPC_SUBS_U_df
:
18169 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18172 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18175 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18178 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18181 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18184 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18187 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18189 case OPC_ADDS_S_df
:
18190 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18192 case OPC_SUBSUS_U_df
:
18193 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18196 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18199 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18202 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18205 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18208 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18211 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18213 case OPC_ADDS_U_df
:
18214 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18216 case OPC_SUBSUU_S_df
:
18217 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18220 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18223 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18226 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18229 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18232 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18234 case OPC_ASUB_S_df
:
18235 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18238 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18241 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18244 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18247 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18250 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18253 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18255 case OPC_ASUB_U_df
:
18256 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18259 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18262 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18265 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18268 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18270 case OPC_AVER_S_df
:
18271 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18274 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18277 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18280 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18283 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18285 case OPC_AVER_U_df
:
18286 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18289 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18292 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18295 case OPC_DOTP_S_df
:
18296 case OPC_DOTP_U_df
:
18297 case OPC_DPADD_S_df
:
18298 case OPC_DPADD_U_df
:
18299 case OPC_DPSUB_S_df
:
18300 case OPC_HADD_S_df
:
18301 case OPC_DPSUB_U_df
:
18302 case OPC_HADD_U_df
:
18303 case OPC_HSUB_S_df
:
18304 case OPC_HSUB_U_df
:
18305 if (df
== DF_BYTE
) {
18306 generate_exception_end(ctx
, EXCP_RI
);
18309 switch (MASK_MSA_3R(ctx
->opcode
)) {
18310 case OPC_DOTP_S_df
:
18311 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18313 case OPC_DOTP_U_df
:
18314 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18316 case OPC_DPADD_S_df
:
18317 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18319 case OPC_DPADD_U_df
:
18320 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18322 case OPC_DPSUB_S_df
:
18323 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18325 case OPC_HADD_S_df
:
18326 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18328 case OPC_DPSUB_U_df
:
18329 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18331 case OPC_HADD_U_df
:
18332 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18334 case OPC_HSUB_S_df
:
18335 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18337 case OPC_HSUB_U_df
:
18338 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18343 MIPS_INVAL("MSA instruction");
18344 generate_exception_end(ctx
, EXCP_RI
);
18347 tcg_temp_free_i32(twd
);
18348 tcg_temp_free_i32(tws
);
18349 tcg_temp_free_i32(twt
);
18350 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}

18385 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18388 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18389 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18390 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18392 TCGv_i32 tws
= tcg_const_i32(ws
);
18393 TCGv_i32 twd
= tcg_const_i32(wd
);
18394 TCGv_i32 tn
= tcg_const_i32(n
);
18395 TCGv_i32 tdf
= tcg_const_i32(df
);
18397 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18399 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18401 case OPC_SPLATI_df
:
18402 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18405 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18407 case OPC_COPY_S_df
:
18408 case OPC_COPY_U_df
:
18409 case OPC_INSERT_df
:
18410 #if !defined(TARGET_MIPS64)
18411 /* Double format valid only for MIPS64 */
18412 if (df
== DF_DOUBLE
) {
18413 generate_exception_end(ctx
, EXCP_RI
);
18417 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18418 case OPC_COPY_S_df
:
18419 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18421 case OPC_COPY_U_df
:
18422 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18424 case OPC_INSERT_df
:
18425 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18430 MIPS_INVAL("MSA instruction");
18431 generate_exception_end(ctx
, EXCP_RI
);
18433 tcg_temp_free_i32(twd
);
18434 tcg_temp_free_i32(tws
);
18435 tcg_temp_free_i32(tn
);
18436 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}

18468 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18470 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18471 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18472 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18473 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18474 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18476 TCGv_i32 twd
= tcg_const_i32(wd
);
18477 TCGv_i32 tws
= tcg_const_i32(ws
);
18478 TCGv_i32 twt
= tcg_const_i32(wt
);
18479 TCGv_i32 tdf
= tcg_temp_new_i32();
18481 /* adjust df value for floating-point instruction */
18482 tcg_gen_movi_i32(tdf
, df
+ 2);
18484 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18486 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18489 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18492 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18495 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18501 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18504 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18507 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18510 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18513 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18516 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18519 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18522 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18525 tcg_gen_movi_i32(tdf
, df
+ 1);
18526 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18529 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18534 case OPC_MADD_Q_df
:
18535 tcg_gen_movi_i32(tdf
, df
+ 1);
18536 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18539 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18541 case OPC_MSUB_Q_df
:
18542 tcg_gen_movi_i32(tdf
, df
+ 1);
18543 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18546 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18552 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18555 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18558 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18561 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18564 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18567 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18570 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18573 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18576 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18579 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18582 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18584 case OPC_MULR_Q_df
:
18585 tcg_gen_movi_i32(tdf
, df
+ 1);
18586 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18589 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18591 case OPC_FMIN_A_df
:
18592 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18594 case OPC_MADDR_Q_df
:
18595 tcg_gen_movi_i32(tdf
, df
+ 1);
18596 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18599 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18602 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18604 case OPC_MSUBR_Q_df
:
18605 tcg_gen_movi_i32(tdf
, df
+ 1);
18606 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18611 case OPC_FMAX_A_df
:
18612 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18615 MIPS_INVAL("MSA instruction");
18616 generate_exception_end(ctx
, EXCP_RI
);
18620 tcg_temp_free_i32(twd
);
18621 tcg_temp_free_i32(tws
);
18622 tcg_temp_free_i32(twt
);
18623 tcg_temp_free_i32(tdf
);
18626 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18628 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18629 (op & (0x7 << 18)))
18630 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18631 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18632 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18633 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18634 TCGv_i32 twd
= tcg_const_i32(wd
);
18635 TCGv_i32 tws
= tcg_const_i32(ws
);
18636 TCGv_i32 twt
= tcg_const_i32(wt
);
18637 TCGv_i32 tdf
= tcg_const_i32(df
);
18639 switch (MASK_MSA_2R(ctx
->opcode
)) {
18641 #if !defined(TARGET_MIPS64)
18642 /* Double format valid only for MIPS64 */
18643 if (df
== DF_DOUBLE
) {
18644 generate_exception_end(ctx
, EXCP_RI
);
18648 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18651 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18654 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18657 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18660 MIPS_INVAL("MSA instruction");
18661 generate_exception_end(ctx
, EXCP_RI
);
18665 tcg_temp_free_i32(twd
);
18666 tcg_temp_free_i32(tws
);
18667 tcg_temp_free_i32(twt
);
18668 tcg_temp_free_i32(tdf
);
18671 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18673 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18674 (op & (0xf << 17)))
18675 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18676 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18677 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18678 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18679 TCGv_i32 twd
= tcg_const_i32(wd
);
18680 TCGv_i32 tws
= tcg_const_i32(ws
);
18681 TCGv_i32 twt
= tcg_const_i32(wt
);
18682 /* adjust df value for floating-point instruction */
18683 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18685 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18686 case OPC_FCLASS_df
:
18687 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18689 case OPC_FTRUNC_S_df
:
18690 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18692 case OPC_FTRUNC_U_df
:
18693 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18696 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18698 case OPC_FRSQRT_df
:
18699 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18702 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18705 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18708 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18710 case OPC_FEXUPL_df
:
18711 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18713 case OPC_FEXUPR_df
:
18714 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18717 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18720 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18722 case OPC_FTINT_S_df
:
18723 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18725 case OPC_FTINT_U_df
:
18726 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18728 case OPC_FFINT_S_df
:
18729 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18731 case OPC_FFINT_U_df
:
18732 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18736 tcg_temp_free_i32(twd
);
18737 tcg_temp_free_i32(tws
);
18738 tcg_temp_free_i32(twt
);
18739 tcg_temp_free_i32(tdf
);
18742 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18744 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18745 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18746 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18747 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18748 TCGv_i32 twd
= tcg_const_i32(wd
);
18749 TCGv_i32 tws
= tcg_const_i32(ws
);
18750 TCGv_i32 twt
= tcg_const_i32(wt
);
18752 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18754 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18757 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18760 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18763 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18766 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18769 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18772 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18775 MIPS_INVAL("MSA instruction");
18776 generate_exception_end(ctx
, EXCP_RI
);
18780 tcg_temp_free_i32(twd
);
18781 tcg_temp_free_i32(tws
);
18782 tcg_temp_free_i32(twt
);
18785 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18787 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18795 gen_msa_vec_v(env
, ctx
);
18798 gen_msa_2r(env
, ctx
);
18801 gen_msa_2rf(env
, ctx
);
18804 MIPS_INVAL("MSA instruction");
18805 generate_exception_end(ctx
, EXCP_RI
);
18810 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18812 uint32_t opcode
= ctx
->opcode
;
18813 check_insn(ctx
, ASE_MSA
);
18814 check_msa_access(ctx
);
18816 switch (MASK_MSA_MINOR(opcode
)) {
18817 case OPC_MSA_I8_00
:
18818 case OPC_MSA_I8_01
:
18819 case OPC_MSA_I8_02
:
18820 gen_msa_i8(env
, ctx
);
18822 case OPC_MSA_I5_06
:
18823 case OPC_MSA_I5_07
:
18824 gen_msa_i5(env
, ctx
);
18826 case OPC_MSA_BIT_09
:
18827 case OPC_MSA_BIT_0A
:
18828 gen_msa_bit(env
, ctx
);
18830 case OPC_MSA_3R_0D
:
18831 case OPC_MSA_3R_0E
:
18832 case OPC_MSA_3R_0F
:
18833 case OPC_MSA_3R_10
:
18834 case OPC_MSA_3R_11
:
18835 case OPC_MSA_3R_12
:
18836 case OPC_MSA_3R_13
:
18837 case OPC_MSA_3R_14
:
18838 case OPC_MSA_3R_15
:
18839 gen_msa_3r(env
, ctx
);
18842 gen_msa_elm(env
, ctx
);
18844 case OPC_MSA_3RF_1A
:
18845 case OPC_MSA_3RF_1B
:
18846 case OPC_MSA_3RF_1C
:
18847 gen_msa_3rf(env
, ctx
);
18850 gen_msa_vec(env
, ctx
);
18861 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18862 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18863 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18864 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18866 TCGv_i32 twd
= tcg_const_i32(wd
);
18867 TCGv taddr
= tcg_temp_new();
18868 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18870 switch (MASK_MSA_MINOR(opcode
)) {
18872 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18875 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18878 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18881 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18884 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18887 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18890 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18893 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18897 tcg_temp_free_i32(twd
);
18898 tcg_temp_free(taddr
);
18902 MIPS_INVAL("MSA instruction");
18903 generate_exception_end(ctx
, EXCP_RI
);
18909 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18912 int rs
, rt
, rd
, sa
;
18916 /* make sure instructions are on a word boundary */
18917 if (ctx
->pc
& 0x3) {
18918 env
->CP0_BadVAddr
= ctx
->pc
;
18919 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18923 /* Handle blikely not taken case */
18924 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18925 TCGLabel
*l1
= gen_new_label();
18927 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18928 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18929 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
18933 op
= MASK_OP_MAJOR(ctx
->opcode
);
18934 rs
= (ctx
->opcode
>> 21) & 0x1f;
18935 rt
= (ctx
->opcode
>> 16) & 0x1f;
18936 rd
= (ctx
->opcode
>> 11) & 0x1f;
18937 sa
= (ctx
->opcode
>> 6) & 0x1f;
18938 imm
= (int16_t)ctx
->opcode
;
18941 decode_opc_special(env
, ctx
);
18944 decode_opc_special2_legacy(env
, ctx
);
18947 decode_opc_special3(env
, ctx
);
18950 op1
= MASK_REGIMM(ctx
->opcode
);
18952 case OPC_BLTZL
: /* REGIMM branches */
18956 check_insn(ctx
, ISA_MIPS2
);
18957 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18961 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18965 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18967 /* OPC_NAL, OPC_BAL */
18968 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18970 generate_exception_end(ctx
, EXCP_RI
);
18973 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18976 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18978 check_insn(ctx
, ISA_MIPS2
);
18979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18980 gen_trap(ctx
, op1
, rs
, -1, imm
);
18983 check_insn(ctx
, ISA_MIPS32R6
);
18984 generate_exception_end(ctx
, EXCP_RI
);
18987 check_insn(ctx
, ISA_MIPS32R2
);
18988 /* Break the TB to be able to sync copied instructions
18990 ctx
->bstate
= BS_STOP
;
18992 case OPC_BPOSGE32
: /* MIPS DSP branch */
18993 #if defined(TARGET_MIPS64)
18997 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
18999 #if defined(TARGET_MIPS64)
19001 check_insn(ctx
, ISA_MIPS32R6
);
19002 check_mips_64(ctx
);
19004 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19008 check_insn(ctx
, ISA_MIPS32R6
);
19009 check_mips_64(ctx
);
19011 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19015 default: /* Invalid */
19016 MIPS_INVAL("regimm");
19017 generate_exception_end(ctx
, EXCP_RI
);
19022 check_cp0_enabled(ctx
);
19023 op1
= MASK_CP0(ctx
->opcode
);
19031 #if defined(TARGET_MIPS64)
19035 #ifndef CONFIG_USER_ONLY
19036 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19037 #endif /* !CONFIG_USER_ONLY */
19039 case OPC_C0_FIRST
... OPC_C0_LAST
:
19040 #ifndef CONFIG_USER_ONLY
19041 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19042 #endif /* !CONFIG_USER_ONLY */
19045 #ifndef CONFIG_USER_ONLY
19048 TCGv t0
= tcg_temp_new();
19050 op2
= MASK_MFMC0(ctx
->opcode
);
19053 check_insn(ctx
, ASE_MT
);
19054 gen_helper_dmt(t0
);
19055 gen_store_gpr(t0
, rt
);
19058 check_insn(ctx
, ASE_MT
);
19059 gen_helper_emt(t0
);
19060 gen_store_gpr(t0
, rt
);
19063 check_insn(ctx
, ASE_MT
);
19064 gen_helper_dvpe(t0
, cpu_env
);
19065 gen_store_gpr(t0
, rt
);
19068 check_insn(ctx
, ASE_MT
);
19069 gen_helper_evpe(t0
, cpu_env
);
19070 gen_store_gpr(t0
, rt
);
19073 check_insn(ctx
, ISA_MIPS32R2
);
19074 save_cpu_state(ctx
, 1);
19075 gen_helper_di(t0
, cpu_env
);
19076 gen_store_gpr(t0
, rt
);
19077 /* Stop translation as we may have switched
19078 the execution mode. */
19079 ctx
->bstate
= BS_STOP
;
19082 check_insn(ctx
, ISA_MIPS32R2
);
19083 save_cpu_state(ctx
, 1);
19084 gen_helper_ei(t0
, cpu_env
);
19085 gen_store_gpr(t0
, rt
);
19086 /* Stop translation as we may have switched
19087 the execution mode. */
19088 ctx
->bstate
= BS_STOP
;
19090 default: /* Invalid */
19091 MIPS_INVAL("mfmc0");
19092 generate_exception_end(ctx
, EXCP_RI
);
19097 #endif /* !CONFIG_USER_ONLY */
19100 check_insn(ctx
, ISA_MIPS32R2
);
19101 gen_load_srsgpr(rt
, rd
);
19104 check_insn(ctx
, ISA_MIPS32R2
);
19105 gen_store_srsgpr(rt
, rd
);
19109 generate_exception_end(ctx
, EXCP_RI
);
19113 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19114 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19115 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19116 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19119 /* Arithmetic with immediate opcode */
19120 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19124 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19126 case OPC_SLTI
: /* Set on less than with immediate opcode */
19128 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19130 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19131 case OPC_LUI
: /* OPC_AUI */
19134 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;
    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;

    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2);
            } else {
                /* OPC_BC1ANY2 */
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
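        /* FP arithmetic: the S/D format groups go straight to gen_farith();
           the W/L format group additionally decodes the R6 CMP.cond.fmt
           comparisons below. */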
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LDC2, OPC_SDC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
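    /* COP2 proper is only wired up for the Loongson-2F multimedia
       instructions; COP3 carries the indexed FP loads/stores and the
       FP multiply-add group when an FPU is present. */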
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;

    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
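/* Main translation entry point: translate guest code starting at tb->pc
   into a single TCG translation block. */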
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    /* Restore delay slot state from the tb context.  */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
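    /* Translate one instruction per iteration; the loop stops on a branch
       (ctx.bstate leaves BS_NONE), at a page boundary, when the TCG op
       buffer fills up, or after max_insns instructions. */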
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }

        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    LOG_DISAS("\n");
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
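/* Debug helpers: dump the FPU and CPU register state to the given stream. */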
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, "    Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, "    Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, "    Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
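/* Allocate the fixed TCG globals (GPRs, MSA/FPU registers, PC, HI/LO,
   DSPControl, branch state and FPU control registers) once at startup. */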
void mips_tcg_init(void)
{
    int i;
    static int inited;

    /* Initialize various static tables. */
    if (inited)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(TCG_AREG0, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(TCG_AREG0, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");

    inited = 1;
}

#include "translate_init.c"
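/* Create and realize a MIPS CPU object for the given cpu_model name. */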
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}
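/* Reset the CPU: reload the CP0 configuration from the cpu_model
   definition, then apply user-mode or system-mode reset values. */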
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];