/*
 *  MIPS emulation for QEMU - main translation routines
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/qemu-print.h"

#define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)       (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
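
/*
 * Decoding sketch (added for illustration; decode_major_example() is a
 * hypothetical helper, not a function of this file): the top six bits
 * selected by MASK_OP_MAJOR() classify an instruction word into one of
 * the major-opcode groups above, and the OPC_SPECIAL* groups are then
 * refined by the function/sub-opcode masks defined further down.
 */
static void decode_major_example(uint32_t insn)
{
    switch (MASK_OP_MAJOR(insn)) {
    case OPC_SPECIAL:
        /* function field in bits 5..0, see MASK_SPECIAL() below */
        break;
    case OPC_ADDIU:
        /* rt = rs + sign_extend(immediate) */
        break;
    case OPC_CP1:
        /* coprocessor 1 (FPU); the rs field selects the sub-operation */
        break;
    default:
        break;
    }
}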
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    /* Other */
    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
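
/*
 * Illustrative two-level decode (added sketch, not this file's decoder;
 * decode_pcrel_example() is a hypothetical name): entries distinguished
 * by bits 20..19 are matched first, and anything else falls through to
 * the wider 5-bit rt-field mask that identifies AUIPC/ALUIPC.
 */
static void decode_pcrel_example(uint32_t insn)
{
    switch (MASK_OPC_PCREL_TOP2BITS(insn)) {
    case OPC_ADDIUPC:
    case R6_OPC_LWPC:
    case OPC_LWUPC:
        /* PC-relative address computation / load, offset in bits 18..0 */
        break;
    default:
        switch (MASK_OPC_PCREL_TOP5BITS(insn)) {
        case OPC_AUIPC:
        case OPC_ALUIPC:
            /* 16-bit immediate in bits 15..0 */
            break;
        default:
            break;
        }
        break;
    }
}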
/* MIPS special opcodes */
#define MASK_SPECIAL(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Shifts */
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    /* Jumps */
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    /* Traps */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    /* Special */
    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
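
/*
 * Added note (illustrative helper, not part of the original file): NOP,
 * SSNOP and EHB are all encodings of SLL r0, r0, sa and differ only in
 * the sa field (0, 1 and 3 respectively), so after matching OPC_SLL a
 * decoder distinguishes them by bits 10..6 of the instruction word.
 */
static unsigned sll_shift_amount(uint32_t insn)
{
    return (insn >> 6) & 0x1f;   /* the sa (shift amount) field */
}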
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)     (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA       = 0x05 | OPC_SPECIAL,
    OPC_DLSA      = 0x15 | OPC_SPECIAL,
};
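
/*
 * Added sketch (insn_is_r6_mul() is a hypothetical helper): legacy MULT
 * and R6 MUL share the SPECIAL opcode and the 0x18 function code; they
 * differ only in the shamt bits that MASK_R6_MULDIV() keeps, so on an R6
 * core the wider mask is what separates the two encodings.
 */
static bool insn_is_r6_mul(uint32_t insn)
{
    return MASK_R6_MULDIV(insn) == R6_OPC_MUL;
}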
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)    MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)        MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)      MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,
    /* Loongson 2F */
    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,
    /* Misc */
    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,
    /* Special */
    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)      MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    /* Loongson 2E */
    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    /* MIPS DSP Load */
    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    /* EVA */
    OPC_LWLE           = 0x19 | OPC_SPECIAL3,
    OPC_LWRE           = 0x1A | OPC_SPECIAL3,
    OPC_CACHEE         = 0x1B | OPC_SPECIAL3,
    OPC_SBE            = 0x1C | OPC_SPECIAL3,
    OPC_SHE            = 0x1D | OPC_SPECIAL3,
    OPC_SCE            = 0x1E | OPC_SPECIAL3,
    OPC_SWE            = 0x1F | OPC_SPECIAL3,
    OPC_SWLE           = 0x21 | OPC_SPECIAL3,
    OPC_SWRE           = 0x22 | OPC_SPECIAL3,
    OPC_PREFE          = 0x23 | OPC_SPECIAL3,
    OPC_LBUE           = 0x28 | OPC_SPECIAL3,
    OPC_LHUE           = 0x29 | OPC_SPECIAL3,
    OPC_LBE            = 0x2C | OPC_SPECIAL3,
    OPC_LHE            = 0x2D | OPC_SPECIAL3,
    OPC_LLE            = 0x2E | OPC_SPECIAL3,
    OPC_LWE            = 0x2F | OPC_SPECIAL3,

    /* R6 */
    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
461 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
464 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
465 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
466 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
467 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
468 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
469 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
470 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
471 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
475 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
478 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
479 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
480 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
481 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
487 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
488 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
491 /* MIPS DSP REGIMM opcodes */
493 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
494 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
497 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
500 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
501 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
502 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
503 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
506 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
522 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
526 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
527 /* MIPS DSP Multiply Sub-class insns */
528 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
533 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
536 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
537 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
539 /* MIPS DSP Arithmetic Sub-class */
540 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
551 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
552 /* MIPS DSP Multiply Sub-class insns */
553 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
556 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
559 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
561 /* MIPS DSP Arithmetic Sub-class */
562 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
574 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
575 /* DSP Bit/Manipulation Sub-class */
576 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
580 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
583 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
585 /* MIPS DSP Arithmetic Sub-class */
586 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
593 /* DSP Compare-Pick Sub-class */
594 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
608 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
611 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
613 /* MIPS DSP GPR-Based Shift Sub-class */
614 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
635 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
638 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
640 /* MIPS DSP Multiply Sub-class insns */
641 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
662 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
665 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
667 /* DSP Bit/Manipulation Sub-class */
668 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
671 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
673 /* MIPS DSP Append Sub-class */
674 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
675 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
676 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
679 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
681 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
682 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
693 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
695 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
696 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
697 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
698 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
701 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
703 /* MIPS DSP Arithmetic Sub-class */
704 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
720 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
721 /* DSP Bit/Manipulation Sub-class */
722 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
727 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
730 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
732 /* MIPS DSP Multiply Sub-class insns */
733 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
738 /* MIPS DSP Arithmetic Sub-class */
739 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
749 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
759 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
762 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
764 /* DSP Compare-Pick Sub-class */
765 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
784 /* MIPS DSP Arithmetic Sub-class */
785 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
795 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* DSP Append Sub-class */
798 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
800 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
801 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
804 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
806 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
807 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
827 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
830 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
832 /* DSP Bit/Manipulation Sub-class */
833 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
836 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP Multiply Sub-class insns */
839 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
864 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
867 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
869 /* MIPS DSP GPR-Based Shift Sub-class */
870 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
895 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
898 /* Coprocessor 0 (rs field) */
899 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
902 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
903 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
904 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
905 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
906 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
907 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
908 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
909 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
910 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
911 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
912 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
913 OPC_C0
= (0x10 << 21) | OPC_CP0
,
914 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
915 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
916 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
917 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
918 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
919 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
920 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
921 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
922 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
923 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
924 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
925 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
926 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
927 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
928 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
932 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
935 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
937 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
938 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
939 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
941 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
942 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
945 /* Coprocessor 0 (with rs == C0) */
946 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
949 OPC_TLBR
= 0x01 | OPC_C0
,
950 OPC_TLBWI
= 0x02 | OPC_C0
,
951 OPC_TLBINV
= 0x03 | OPC_C0
,
952 OPC_TLBINVF
= 0x04 | OPC_C0
,
953 OPC_TLBWR
= 0x06 | OPC_C0
,
954 OPC_TLBP
= 0x08 | OPC_C0
,
955 OPC_RFE
= 0x10 | OPC_C0
,
956 OPC_ERET
= 0x18 | OPC_C0
,
957 OPC_DERET
= 0x1F | OPC_C0
,
958 OPC_WAIT
= 0x20 | OPC_C0
,
961 /* Coprocessor 1 (rs field) */
962 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
964 /* Values for the fmt field in FP instructions */
966 /* 0 - 15 are reserved */
967 FMT_S
= 16, /* single fp */
968 FMT_D
= 17, /* double fp */
969 FMT_E
= 18, /* extended fp */
970 FMT_Q
= 19, /* quad fp */
971 FMT_W
= 20, /* 32-bit fixed */
972 FMT_L
= 21, /* 64-bit fixed */
973 FMT_PS
= 22, /* paired single fp */
974 /* 23 - 31 are reserved */
978 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
979 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
980 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
981 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
982 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
983 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
984 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
985 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
986 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
987 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
988 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
989 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
990 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
991 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
992 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
993 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
994 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
995 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
996 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
997 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
998 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
999 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
1000 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1001 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1002 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1003 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1004 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1005 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1006 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1007 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1010 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1011 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1014 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1015 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1016 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1017 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1021 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1022 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1026 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1027 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1030 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1033 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1034 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1035 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1036 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1037 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1038 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1039 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1040 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1041 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1042 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1043 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1046 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1049 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1056 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1058 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1065 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1067 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1070 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1071 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1072 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1073 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1074 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1076 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1083 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1085 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1090 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1092 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1097 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1099 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1104 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1106 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1111 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1113 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1118 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1120 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1125 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1127 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1132 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1134 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1139 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1143 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1146 OPC_LWXC1
= 0x00 | OPC_CP3
,
1147 OPC_LDXC1
= 0x01 | OPC_CP3
,
1148 OPC_LUXC1
= 0x05 | OPC_CP3
,
1149 OPC_SWXC1
= 0x08 | OPC_CP3
,
1150 OPC_SDXC1
= 0x09 | OPC_CP3
,
1151 OPC_SUXC1
= 0x0D | OPC_CP3
,
1152 OPC_PREFX
= 0x0F | OPC_CP3
,
1153 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1154 OPC_MADD_S
= 0x20 | OPC_CP3
,
1155 OPC_MADD_D
= 0x21 | OPC_CP3
,
1156 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1157 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1158 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1159 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1160 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1161 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1162 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1163 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1164 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1165 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1169 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1171 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1172 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1173 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1174 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1175 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1176 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1177 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1178 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1179 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1180 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1181 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1182 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1183 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1184 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1185 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1186 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1187 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1188 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1189 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1190 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1191 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1193 /* MI10 instruction */
1194 OPC_LD_B
= (0x20) | OPC_MSA
,
1195 OPC_LD_H
= (0x21) | OPC_MSA
,
1196 OPC_LD_W
= (0x22) | OPC_MSA
,
1197 OPC_LD_D
= (0x23) | OPC_MSA
,
1198 OPC_ST_B
= (0x24) | OPC_MSA
,
1199 OPC_ST_H
= (0x25) | OPC_MSA
,
1200 OPC_ST_W
= (0x26) | OPC_MSA
,
1201 OPC_ST_D
= (0x27) | OPC_MSA
,
1205 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1206 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1207 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1208 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1209 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1210 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1211 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1212 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1213 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1214 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1215 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1216 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1217 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1219 /* I8 instruction */
1220 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1221 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1222 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1223 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1224 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1225 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1226 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1227 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1228 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1229 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1231 /* VEC/2R/2RF instruction */
1232 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1233 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1234 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1235 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1236 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1237 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1238 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1241 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1243 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1244 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1245 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1246 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1247 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1249 /* 2RF instruction df(bit 16) = _w, _d */
1250 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1252 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1253 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1254 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1255 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1256 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1257 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1259 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1260 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1261 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1263 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1265 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1267 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1268 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1269 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1270 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1271 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1272 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1273 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1274 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1275 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1276 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1277 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1278 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1279 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1281 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1282 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1283 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1285 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1290 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1291 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1292 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1293 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1294 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1295 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1296 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1297 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1298 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1299 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1300 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1301 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1310 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1312 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1313 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1314 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1315 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1316 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1317 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1318 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1319 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1320 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1321 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1322 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1323 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1324 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1325 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1326 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1327 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1328 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1329 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1330 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1332 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1333 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1335 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1336 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1337 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1338 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1339 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1341 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1343 /* 3RF instruction _df(bit 21) = _w, _d */
1344 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1345 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1346 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1354 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1355 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1356 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1357 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1358 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1360 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1361 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1362 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1365 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1367 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1368 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1369 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1370 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1371 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1372 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1373 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1376 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1377 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1378 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1379 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1380 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1381 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1382 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1383 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1384 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1386 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1387 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1388 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1389 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1390 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1391 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1392 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1393 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1394 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1395 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1398 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
 * ============================================
 *
 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
 * instructions set. It is designed to fit the needs of signal, graphical and
 * video processing applications. MXU instruction set is used in Xburst family
 * of microprocessors by Ingenic.
 *
 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
 * the control register.
 *
 * The notation used in MXU assembler mnemonics
 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *  Register operands:
 *
 *   XRa, XRb, XRc, XRd - MXU registers
 *   Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
 *
 *  Non-register operands:
 *
 *   aptn1 - 1-bit accumulate add/subtract pattern
 *   aptn2 - 2-bit accumulate add/subtract pattern
 *   eptn2 - 2-bit execute add/subtract pattern
 *   optn2 - 2-bit operand pattern
 *   optn3 - 3-bit operand pattern
 *   sft4  - 4-bit shift amount
 *   strd2 - 2-bit stride amount
 *
 *   Level of parallelism:                Operand size:
 *    S - single operation at a time      32 - word
 *    D - two operations in parallel      16 - half word
 *    Q - four operations in parallel      8 - byte
 *
 *   ADD - Add or subtract
 *   ADDC - Add with carry-in
 *   ASUM - Sum together then accumulate (add or subtract)
 *   ASUMC - Sum together then accumulate (add or subtract) with carry-in
 *   AVG - Average between 2 operands
 *   ABD - Absolute difference
 *   AND - Logical bitwise 'and' operation
 *   EXTR - Extract bits
 *   I2M - Move from GPR register to MXU register
 *   LDD - Load data from memory to XRF
 *   LDI - Load data from memory to XRF (and increase the address base)
 *   LUI - Load unsigned immediate
 *   MULU - Unsigned multiply
 *   MADD - 64-bit operand add 32x32 product
 *   MSUB - 64-bit operand subtract 32x32 product
 *   MAC - Multiply and accumulate (add or subtract)
 *   MAD - Multiply and add or subtract
 *   MAX - Maximum between 2 operands
 *   MIN - Minimum between 2 operands
 *   M2I - Move from MXU register to GPR register
 *   MOVZ - Move if zero
 *   MOVN - Move if non-zero
 *   NOR - Logical bitwise 'nor' operation
 *   OR - Logical bitwise 'or' operation
 *   STD - Store data from XRF to memory
 *   SDI - Store data from XRF to memory (and increase the address base)
 *   SLT - Set of less than comparison
 *   SAD - Sum of absolute differences
 *   SLL - Logical shift left
 *   SLR - Logical shift right
 *   SAR - Arithmetic shift right
 *   SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
 *   XOR - Logical bitwise 'exclusive or' operation
 *
 *   E - Expand results
 *   F - Fixed point multiplication
 *   L - Low part result
 *   R - Doing rounding
 *   V - Variable instead of immediate
 *   W - Combine above L and V
 * The list of MXU instructions grouped by functionality
 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *  Load/Store instructions           Multiplication instructions
 *  -----------------------           ---------------------------
 *
 *  S32LDD XRa, Rb, s12               S32MADD XRa, XRd, Rs, Rt
 *  S32STD XRa, Rb, s12               S32MADDU XRa, XRd, Rs, Rt
 *  S32LDDV XRa, Rb, rc, strd2        S32MSUB XRa, XRd, Rs, Rt
 *  S32STDV XRa, Rb, rc, strd2        S32MSUBU XRa, XRd, Rs, Rt
 *  S32LDI XRa, Rb, s12               S32MUL XRa, XRd, Rs, Rt
 *  S32SDI XRa, Rb, s12               S32MULU XRa, XRd, Rs, Rt
 *  S32LDIV XRa, Rb, rc, strd2        D16MUL XRa, XRb, XRc, XRd, optn2
 *  S32SDIV XRa, Rb, rc, strd2        D16MULE XRa, XRb, XRc, optn2
 *  S32LDDR XRa, Rb, s12              D16MULF XRa, XRb, XRc, optn2
 *  S32STDR XRa, Rb, s12              D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32LDDVR XRa, Rb, rc, strd2       D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32STDVR XRa, Rb, rc, strd2       D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32LDIR XRa, Rb, s12              D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
 *  S32SDIR XRa, Rb, s12              S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
 *  S32LDIVR XRa, Rb, rc, strd2       Q8MUL XRa, XRb, XRc, XRd
 *  S32SDIVR XRa, Rb, rc, strd2       Q8MULSU XRa, XRb, XRc, XRd
 *  S16LDD XRa, Rb, s10, eptn2        Q8MAC XRa, XRb, XRc, XRd, aptn2
 *  S16STD XRa, Rb, s10, eptn2        Q8MACSU XRa, XRb, XRc, XRd, aptn2
 *  S16LDI XRa, Rb, s10, eptn2        Q8MADL XRa, XRb, XRc, XRd, aptn2
 *  S16SDI XRa, Rb, s10, eptn2
 *  S8LDD XRa, Rb, s8, eptn3
 *  S8STD XRa, Rb, s8, eptn3          Addition and subtraction instructions
 *  S8LDI XRa, Rb, s8, eptn3          -------------------------------------
 *  S8SDI XRa, Rb, s8, eptn3
 *  LXW Rd, Rs, Rt, strd2             D32ADD XRa, XRb, XRc, XRd, eptn2
 *  LXH Rd, Rs, Rt, strd2             D32ADDC XRa, XRb, XRc, XRd
 *  LXHU Rd, Rs, Rt, strd2            D32ACC XRa, XRb, XRc, XRd, eptn2
 *  LXB Rd, Rs, Rt, strd2             D32ACCM XRa, XRb, XRc, XRd, eptn2
 *  LXBU Rd, Rs, Rt, strd2            D32ASUM XRa, XRb, XRc, XRd, eptn2
 *                                    S32CPS XRa, XRb, XRc
 *                                    Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
 *  Comparison instructions           Q16ACC XRa, XRb, XRc, XRd, eptn2
 *  -----------------------           Q16ACCM XRa, XRb, XRc, XRd, eptn2
 *                                    D16ASUM XRa, XRb, XRc, XRd, eptn2
 *  S32MAX XRa, XRb, XRc              D16CPS XRa, XRb,
 *  S32MIN XRa, XRb, XRc              D16AVG XRa, XRb, XRc
 *  S32SLT XRa, XRb, XRc              D16AVGR XRa, XRb, XRc
 *  S32MOVZ XRa, XRb, XRc             Q8ADD XRa, XRb, XRc, eptn2
 *  S32MOVN XRa, XRb, XRc             Q8ADDE XRa, XRb, XRc, XRd, eptn2
 *  D16MAX XRa, XRb, XRc              Q8ACCE XRa, XRb, XRc, XRd, eptn2
 *  D16MIN XRa, XRb, XRc              Q8ABD XRa, XRb, XRc
 *  D16SLT XRa, XRb, XRc              Q8SAD XRa, XRb, XRc, XRd
 *  D16MOVZ XRa, XRb, XRc             Q8AVG XRa, XRb, XRc
 *  D16MOVN XRa, XRb, XRc             Q8AVGR XRa, XRb, XRc
 *  Q8MAX XRa, XRb, XRc               D8SUM XRa, XRb, XRc, XRd
 *  Q8MIN XRa, XRb, XRc               D8SUMC XRa, XRb, XRc, XRd
 *  Q8SLT XRa, XRb, XRc
 *  Q8SLTU XRa, XRb, XRc
 *  Q8MOVZ XRa, XRb, XRc              Shift instructions
 *  Q8MOVN XRa, XRb, XRc              ------------------
 *
 *                                    D32SLL XRa, XRb, XRc, XRd, sft4
 *  Bitwise instructions              D32SLR XRa, XRb, XRc, XRd, sft4
 *  --------------------              D32SAR XRa, XRb, XRc, XRd, sft4
 *                                    D32SARL XRa, XRb, XRc, sft4
 *  S32NOR XRa, XRb, XRc              D32SLLV XRa, XRb, Rb
 *  S32AND XRa, XRb, XRc              D32SLRV XRa, XRb, Rb
 *  S32XOR XRa, XRb, XRc              D32SARV XRa, XRb, Rb
 *  S32OR XRa, XRb, XRc               D32SARW XRa, XRb, XRc, Rb
 *                                    Q16SLL XRa, XRb, XRc, XRd, sft4
 *                                    Q16SLR XRa, XRb, XRc, XRd, sft4
 *  Miscellaneous instructions        Q16SAR XRa, XRb, XRc, XRd, sft4
 *  -------------------------         Q16SLLV XRa, XRb, Rb
 *                                    Q16SLRV XRa, XRb, Rb
 *  S32SFL XRa, XRb, XRc, XRd, optn2  Q16SARV XRa, XRb, Rb
 *  S32ALN XRa, XRb, XRc, Rb
 *  S32ALNI XRa, XRb, XRc, s3
 *  S32LUI XRa, s8, optn3             Move instructions
 *  S32EXTR XRa, XRb, Rb, bits5       -----------------
 *  S32EXTRV XRa, XRb, Rs, Rt
 *  Q16SCOP XRa, XRb, XRc, XRd        S32M2I XRa, Rb
 *  Q16SAT XRa, XRb, XRc              S32I2M XRa, Rb
 * The opcode organization of MXU instructions
 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
 * other bits up to the instruction level is as follows:
 *
 *          ┌─ 000000 ─ OPC_MXU_S32MADD
 *          ├─ 000001 ─ OPC_MXU_S32MADDU
 *          ├─ 000010 ─ <not assigned>   (non-MXU OPC_MUL)
 *          ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
 *          │                            ├─ 001 ─ OPC_MXU_S32MIN
 *          │                            ├─ 010 ─ OPC_MXU_D16MAX
 *          │                            ├─ 011 ─ OPC_MXU_D16MIN
 *          │                            ├─ 100 ─ OPC_MXU_Q8MAX
 *          │                            ├─ 101 ─ OPC_MXU_Q8MIN
 *          │                            ├─ 110 ─ OPC_MXU_Q8SLT
 *          │                            └─ 111 ─ OPC_MXU_Q8SLTU
 *          ├─ 000100 ─ OPC_MXU_S32MSUB
 *          ├─ 000101 ─ OPC_MXU_S32MSUBU    20..18
 *          ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
 *          │                            ├─ 001 ─ OPC_MXU_D16SLT
 *          │                            ├─ 010 ─ OPC_MXU_D16AVG
 *          │                            ├─ 011 ─ OPC_MXU_D16AVGR
 *          │                            ├─ 100 ─ OPC_MXU_Q8AVG
 *          │                            ├─ 101 ─ OPC_MXU_Q8AVGR
 *          │                            └─ 111 ─ OPC_MXU_Q8ADD
 *          ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
 *          │                            ├─ 010 ─ OPC_MXU_D16CPS
 *          │                            ├─ 100 ─ OPC_MXU_Q8ABD
 *          │                            └─ 110 ─ OPC_MXU_Q16SAT
 *          ├─ 001000 ─ OPC_MXU_D16MUL
 *          ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
 *          │                            └─ 01 ─ OPC_MXU_D16MULE
 *          ├─ 001010 ─ OPC_MXU_D16MAC
 *          ├─ 001011 ─ OPC_MXU_D16MACF
 *          ├─ 001100 ─ OPC_MXU_D16MADL
 *          ├─ 001101 ─ OPC_MXU_S16MAD
 *          ├─ 001110 ─ OPC_MXU_Q16ADD
 *          ├─ 001111 ─ OPC_MXU_D16MACE    23
 *          │                           ┌─ 0 ─ OPC_MXU_S32LDD
 *          ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
 *          ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
 *          │                            └─ 1 ─ OPC_MXU_S32STDR
 *          ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
 *          │                            └─ 0001 ─ OPC_MXU_S32LDDVR
 *          ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
 *          │                            └─ 0001 ─ OPC_MXU_S32STDVR
 *          ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
 *          │                            └─ 1 ─ OPC_MXU_S32LDIR
 *          ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
 *          │                            └─ 1 ─ OPC_MXU_S32SDIR
 *          ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
 *          │                            └─ 0001 ─ OPC_MXU_S32LDIVR
 *          ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
 *          │                            └─ 0001 ─ OPC_MXU_S32SDIVR
 *          ├─ 011000 ─ OPC_MXU_D32ADD
 *   MXU    ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
 * opcodes ─┤                            ├─ 01 ─ OPC_MXU_D32ACCM
 *          │                            └─ 10 ─ OPC_MXU_D32ASUM
 *          ├─ 011010 ─ <not assigned>
 *          ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
 *          │                            ├─ 01 ─ OPC_MXU_Q16ACCM
 *          │                            └─ 10 ─ OPC_MXU_Q16ASUM
 *          ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
 *          │                            ├─ 01 ─ OPC_MXU_D8SUM
 *          ├─ 011101 ─ OPC_MXU_Q8ACCE   └─ 10 ─ OPC_MXU_D8SUMC
 *          ├─ 011110 ─ <not assigned>
 *          ├─ 011111 ─ <not assigned>
 *          ├─ 100000 ─ <not assigned>   (overlaps with CLZ)
 *          ├─ 100001 ─ <not assigned>   (overlaps with CLO)
 *          ├─ 100010 ─ OPC_MXU_S8LDD
 *          ├─ 100011 ─ OPC_MXU_S8STD       15..14
 *          ├─ 100100 ─ OPC_MXU_S8LDI     ┌─ 00 ─ OPC_MXU_S32MUL
 *          ├─ 100101 ─ OPC_MXU_S8SDI     ├─ 00 ─ OPC_MXU_S32MULU
 *          │                             ├─ 00 ─ OPC_MXU_S32EXTR
 *          ├─ 100110 ─ OPC_MXU__POOL15 ──┴─ 00 ─ OPC_MXU_S32EXTRV
 *          ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
 *          │                            ├─ 001 ─ OPC_MXU_S32ALN
 *          │                            ├─ 010 ─ OPC_MXU_S32ALNI
 *          │                            ├─ 011 ─ OPC_MXU_S32LUI
 *          │                            ├─ 100 ─ OPC_MXU_S32NOR
 *          │                            ├─ 101 ─ OPC_MXU_S32AND
 *          │                            ├─ 110 ─ OPC_MXU_S32OR
 *          │                            └─ 111 ─ OPC_MXU_S32XOR
 *          ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
 *          │                            ├─ 001 ─ OPC_MXU_LXH
 *          ├─ 101001 ─ <not assigned>   ├─ 011 ─ OPC_MXU_LXW
 *          ├─ 101010 ─ OPC_MXU_S16LDD   ├─ 100 ─ OPC_MXU_LXBU
 *          ├─ 101011 ─ OPC_MXU_S16STD   └─ 101 ─ OPC_MXU_LXHU
 *          ├─ 101100 ─ OPC_MXU_S16LDI
 *          ├─ 101101 ─ OPC_MXU_S16SDI
 *          ├─ 101110 ─ OPC_MXU_S32M2I
 *          ├─ 101111 ─ OPC_MXU_S32I2M
 *          ├─ 110000 ─ OPC_MXU_D32SLL
 *          ├─ 110001 ─ OPC_MXU_D32SLR      20..18
 *          ├─ 110010 ─ OPC_MXU_D32SARL   ┌─ 000 ─ OPC_MXU_D32SLLV
 *          ├─ 110011 ─ OPC_MXU_D32SAR    ├─ 001 ─ OPC_MXU_D32SLRV
 *          ├─ 110100 ─ OPC_MXU_Q16SLL    ├─ 010 ─ OPC_MXU_D32SARV
 *          ├─ 110101 ─ OPC_MXU_Q16SLR    ├─ 011 ─ OPC_MXU_Q16SLLV
 *          │                             ├─ 100 ─ OPC_MXU_Q16SLRV
 *          ├─ 110110 ─ OPC_MXU__POOL18 ──┴─ 101 ─ OPC_MXU_Q16SARV
 *          ├─ 110111 ─ OPC_MXU_Q16SAR
 *          ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
 *          │                            └─ 01 ─ OPC_MXU_Q8MULSU
 *          ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
 *          │                            ├─ 001 ─ OPC_MXU_Q8MOVN
 *          │                            ├─ 010 ─ OPC_MXU_D16MOVZ
 *          │                            ├─ 011 ─ OPC_MXU_D16MOVN
 *          │                            ├─ 100 ─ OPC_MXU_S32MOVZ
 *          │                            └─ 101 ─ OPC_MXU_S32MOVN
 *          ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
 *          │                            └─ 10 ─ OPC_MXU_Q8MACSU
 *          ├─ 111011 ─ OPC_MXU_Q16SCOP
 *          ├─ 111100 ─ OPC_MXU_Q8MADL
 *          ├─ 111101 ─ OPC_MXU_S32SFL
 *          ├─ 111110 ─ OPC_MXU_Q8SAD
 *          └─ 111111 ─ <not assigned>   (overlaps with SDBBP)
 *
 *   "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
 *   Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
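
/*
 * Illustrative decoding sketch: per the diagram above, an MXU instruction is
 * selected by bits 5..0 of a SPECIAL2-encoded word, so a dispatcher based on
 * this enum could plausibly look like the snippet below. The handler names
 * (gen_mxu_s32madd, decode_opc_mxu__pool00) are hypothetical placeholders,
 * not functions defined by this file.
 *
 *     switch (extract32(ctx->opcode, 0, 6)) {
 *     case OPC_MXU_S32MADD:
 *         gen_mxu_s32madd(ctx);           // hypothetical handler
 *         break;
 *     case OPC_MXU__POOL00:
 *         decode_opc_mxu__pool00(ctx);    // hypothetical pool dispatcher
 *         break;
 *     }
 */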
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,

    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,

    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,

    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,

    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,

    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,

    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,

    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,

    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,

    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,

    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,

    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,

    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,

    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,

    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,

    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,

    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32LUI   = 0x03,
    OPC_MXU_S32NOR   = 0x04,
    OPC_MXU_S32AND   = 0x05,
    OPC_MXU_S32OR    = 0x06,
    OPC_MXU_S32XOR   = 0x07,

    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,

    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,

    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,

    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,

    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
 * Overview of the TX79-specific instruction set
 * =============================================
 *
 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
 * are only used by the specific quadword (128-bit) LQ/SQ load/store
 * instructions and certain multimedia instructions (MMIs). These MMIs
 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
 * or sixteen 8-bit paths.
 *
 * The Toshiba TX System RISC TX79 Core Architecture manual,
 * https://wiki.qemu.org/File:C790.pdf
 *
 * Three-Operand Multiply and Multiply-Add (4 instructions)
 * --------------------------------------------------------
 * MADD    [rd,] rs, rt      Multiply/Add
 * MADDU   [rd,] rs, rt      Multiply/Add Unsigned
 * MULT    [rd,] rs, rt      Multiply (3-operand)
 * MULTU   [rd,] rs, rt      Multiply Unsigned (3-operand)
 *
 * Multiply Instructions for Pipeline 1 (10 instructions)
 * ------------------------------------------------------
 * MULT1   [rd,] rs, rt      Multiply Pipeline 1
 * MULTU1  [rd,] rs, rt      Multiply Unsigned Pipeline 1
 * DIV1    rs, rt            Divide Pipeline 1
 * DIVU1   rs, rt            Divide Unsigned Pipeline 1
 * MADD1   [rd,] rs, rt      Multiply-Add Pipeline 1
 * MADDU1  [rd,] rs, rt      Multiply-Add Unsigned Pipeline 1
 * MFHI1   rd                Move From HI1 Register
 * MFLO1   rd                Move From LO1 Register
 * MTHI1   rs                Move To HI1 Register
 * MTLO1   rs                Move To LO1 Register
 *
 * Arithmetic (19 instructions)
 * ----------------------------
 * PADDB   rd, rs, rt        Parallel Add Byte
 * PSUBB   rd, rs, rt        Parallel Subtract Byte
 * PADDH   rd, rs, rt        Parallel Add Halfword
 * PSUBH   rd, rs, rt        Parallel Subtract Halfword
 * PADDW   rd, rs, rt        Parallel Add Word
 * PSUBW   rd, rs, rt        Parallel Subtract Word
 * PADSBH  rd, rs, rt        Parallel Add/Subtract Halfword
 * PADDSB  rd, rs, rt        Parallel Add with Signed Saturation Byte
 * PSUBSB  rd, rs, rt        Parallel Subtract with Signed Saturation Byte
 * PADDSH  rd, rs, rt        Parallel Add with Signed Saturation Halfword
 * PSUBSH  rd, rs, rt        Parallel Subtract with Signed Saturation Halfword
 * PADDSW  rd, rs, rt        Parallel Add with Signed Saturation Word
 * PSUBSW  rd, rs, rt        Parallel Subtract with Signed Saturation Word
 * PADDUB  rd, rs, rt        Parallel Add with Unsigned saturation Byte
 * PSUBUB  rd, rs, rt        Parallel Subtract with Unsigned saturation Byte
 * PADDUH  rd, rs, rt        Parallel Add with Unsigned saturation Halfword
 * PSUBUH  rd, rs, rt        Parallel Subtract with Unsigned saturation Halfword
 * PADDUW  rd, rs, rt        Parallel Add with Unsigned saturation Word
 * PSUBUW  rd, rs, rt        Parallel Subtract with Unsigned saturation Word
 *
 * Min/Max (4 instructions)
 * ------------------------
 * PMAXH   rd, rs, rt        Parallel Maximum Halfword
 * PMINH   rd, rs, rt        Parallel Minimum Halfword
 * PMAXW   rd, rs, rt        Parallel Maximum Word
 * PMINW   rd, rs, rt        Parallel Minimum Word
 *
 * Absolute (2 instructions)
 * -------------------------
 * PABSH   rd, rt            Parallel Absolute Halfword
 * PABSW   rd, rt            Parallel Absolute Word
 *
 * Logical (4 instructions)
 * ------------------------
 * PAND    rd, rs, rt        Parallel AND
 * POR     rd, rs, rt        Parallel OR
 * PXOR    rd, rs, rt        Parallel XOR
 * PNOR    rd, rs, rt        Parallel NOR
 *
 * Shift (9 instructions)
 * ----------------------
 * PSLLH   rd, rt, sa        Parallel Shift Left Logical Halfword
 * PSRLH   rd, rt, sa        Parallel Shift Right Logical Halfword
 * PSRAH   rd, rt, sa        Parallel Shift Right Arithmetic Halfword
 * PSLLW   rd, rt, sa        Parallel Shift Left Logical Word
 * PSRLW   rd, rt, sa        Parallel Shift Right Logical Word
 * PSRAW   rd, rt, sa        Parallel Shift Right Arithmetic Word
 * PSLLVW  rd, rt, rs        Parallel Shift Left Logical Variable Word
 * PSRLVW  rd, rt, rs        Parallel Shift Right Logical Variable Word
 * PSRAVW  rd, rt, rs        Parallel Shift Right Arithmetic Variable Word
 *
 * Compare (6 instructions)
 * ------------------------
 * PCGTB   rd, rs, rt        Parallel Compare for Greater Than Byte
 * PCEQB   rd, rs, rt        Parallel Compare for Equal Byte
 * PCGTH   rd, rs, rt        Parallel Compare for Greater Than Halfword
 * PCEQH   rd, rs, rt        Parallel Compare for Equal Halfword
 * PCGTW   rd, rs, rt        Parallel Compare for Greater Than Word
 * PCEQW   rd, rs, rt        Parallel Compare for Equal Word
 *
 * LZC (1 instruction)
 * -------------------
 * PLZCW   rd, rs            Parallel Leading Zero or One Count Word
 *
 * Quadword Load and Store (2 instructions)
 * ----------------------------------------
 * LQ      rt, offset(base)  Load Quadword
 * SQ      rt, offset(base)  Store Quadword
 *
 * Multiply and Divide (19 instructions)
 * -------------------------------------
 * PMULTW  rd, rs, rt        Parallel Multiply Word
 * PMULTUW rd, rs, rt        Parallel Multiply Unsigned Word
 * PDIVW   rs, rt            Parallel Divide Word
 * PDIVUW  rs, rt            Parallel Divide Unsigned Word
 * PMADDW  rd, rs, rt        Parallel Multiply-Add Word
 * PMADDUW rd, rs, rt        Parallel Multiply-Add Unsigned Word
 * PMSUBW  rd, rs, rt        Parallel Multiply-Subtract Word
 * PMULTH  rd, rs, rt        Parallel Multiply Halfword
 * PMADDH  rd, rs, rt        Parallel Multiply-Add Halfword
 * PMSUBH  rd, rs, rt        Parallel Multiply-Subtract Halfword
 * PHMADH  rd, rs, rt        Parallel Horizontal Multiply-Add Halfword
 * PHMSBH  rd, rs, rt        Parallel Horizontal Multiply-Subtract Halfword
 * PDIVBW  rs, rt            Parallel Divide Broadcast Word
 * PMFHI   rd                Parallel Move From HI Register
 * PMFLO   rd                Parallel Move From LO Register
 * PMTHI   rs                Parallel Move To HI Register
 * PMTLO   rs                Parallel Move To LO Register
 * PMFHL   rd                Parallel Move From HI/LO Register
 * PMTHL   rs                Parallel Move To HI/LO Register
 *
 * Pack/Extend (11 instructions)
 * -----------------------------
 * PPAC5   rd, rt            Parallel Pack to 5 bits
 * PPACB   rd, rs, rt        Parallel Pack to Byte
 * PPACH   rd, rs, rt        Parallel Pack to Halfword
 * PPACW   rd, rs, rt        Parallel Pack to Word
 * PEXT5   rd, rt            Parallel Extend Upper from 5 bits
 * PEXTUB  rd, rs, rt        Parallel Extend Upper from Byte
 * PEXTLB  rd, rs, rt        Parallel Extend Lower from Byte
 * PEXTUH  rd, rs, rt        Parallel Extend Upper from Halfword
 * PEXTLH  rd, rs, rt        Parallel Extend Lower from Halfword
 * PEXTUW  rd, rs, rt        Parallel Extend Upper from Word
 * PEXTLW  rd, rs, rt        Parallel Extend Lower from Word
 *
 * Others (16 instructions)
 * ------------------------
 * PCPYH   rd, rt            Parallel Copy Halfword
 * PCPYLD  rd, rs, rt        Parallel Copy Lower Doubleword
 * PCPYUD  rd, rs, rt        Parallel Copy Upper Doubleword
 * PREVH   rd, rt            Parallel Reverse Halfword
 * PINTH   rd, rs, rt        Parallel Interleave Halfword
 * PINTEH  rd, rs, rt        Parallel Interleave Even Halfword
 * PEXEH   rd, rt            Parallel Exchange Even Halfword
 * PEXCH   rd, rt            Parallel Exchange Center Halfword
 * PEXEW   rd, rt            Parallel Exchange Even Word
 * PEXCW   rd, rt            Parallel Exchange Center Word
 * QFSRV   rd, rs, rt        Quadword Funnel Shift Right Variable
 * MFSA    rd                Move from Shift Amount Register
 * MTSA    rs                Move to Shift Amount Register
 * MTSAB   rs, immediate     Move Byte Count to Shift Amount Register
 * MTSAH   rs, immediate     Move Halfword Count to Shift Amount Register
 * PROT3W  rd, rt            Parallel Rotate 3 Words
 *
 * MMI (MultiMedia Instruction) encodings
 * ======================================
 *
 * MMI instructions encoding table keys:
 *
 *     *   This code is reserved for future use. An attempt to execute it
 *         causes a Reserved Instruction exception.
 *     %   This code indicates an instruction class. The instruction word
 *         must be further decoded by examining additional tables that show
 *         the values for other instruction fields.
 *     #   This code is reserved for the unsupported instructions DMULT,
 *         DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
 *         to execute it causes a Reserved Instruction exception.
 *
 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
 *
 * +--------+----------------------------------------+
 * +--------+----------------------------------------+
 *
 *   opcode  bits 28..26
 *     bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *   31..29 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |SPECIAL| REGIMM|   J   |  JAL  |  BEQ  |  BNE  |  BLEZ |  BGTZ
 *    1 001 |  ADDI | ADDIU |  SLTI | SLTIU |  ANDI |  ORI  |  XORI |  LUI
 *    2 010 |  COP0 |  COP1 |   *   |   *   |  BEQL |  BNEL | BLEZL | BGTZL
 *    3 011 | DADDI | DADDIU|  LDL  |  LDR  |  MMI% |   *   |   LQ  |   SQ
 *    4 100 |   LB  |   LH  |  LWL  |   LW  |  LBU  |  LHU  |  LWR  |  LWU
 *    5 101 |   SB  |   SH  |  SWL  |   SW  |  SDL  |  SDR  |  SWR  | CACHE
 *    6 110 |   #   |  LWC1 |   #   |  PREF |   #   |  LDC1 |   #   |   LD
 *    7 111 |   #   |  SWC1 |   #   |   *   |   #   |  SDC1 |   #   |   SD
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
 * MMI instructions with opcode field = MMI:
 *
 * +--------+-------------------------------+--------+
 * |   MMI  |                               |function|
 * +--------+-------------------------------+--------+
 *
 *  function  bits  2..0
 *    bits  |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *   5..3   |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |  MADD | MADDU |   *   |   *   | PLZCW |   *   |   *   |   *
 *    1 001 | MMI0% | MMI2% |   *   |   *   |   *   |   *   |   *   |   *
 *    2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 |   *   |   *   |   *   |   *
 *    3 011 | MULT1 | MULTU1|  DIV1 | DIVU1 |   *   |   *   |   *   |   *
 *    4 100 | MADD1 | MADDU1|   *   |   *   |   *   |   *   |   *   |   *
 *    5 101 | MMI1% | MMI3% |   *   |   *   |   *   |   *   |   *   |   *
 *    6 110 | PMFHL | PMTHL |   *   |   *   | PSLLH |   *   | PSRLH | PSRAH
 *    7 111 |   *   |   *   |   *   |   *   | PSLLW |   *   | PSRLW | PSRAW

#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
    MMI_OPC_MADD       = 0x00 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADD */
    MMI_OPC_MADDU      = 0x01 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADDU */
    MMI_OPC_PLZCW      = 0x04 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI0 = 0x08 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI2 = 0x09 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MFHI1      = 0x10 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFHI */
    MMI_OPC_MTHI1      = 0x11 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTHI */
    MMI_OPC_MFLO1      = 0x12 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MFLO */
    MMI_OPC_MTLO1      = 0x13 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MTLO */
    MMI_OPC_MULT1      = 0x18 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MULT */
    MMI_OPC_MULTU1     = 0x19 | MMI_OPC_CLASS_MMI, /* Same min. as OPC_MULTU */
    MMI_OPC_DIV1       = 0x1A | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIV */
    MMI_OPC_DIVU1      = 0x1B | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIVU */
    MMI_OPC_MADD1      = 0x20 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MADDU1     = 0x21 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI1 = 0x28 | MMI_OPC_CLASS_MMI,
    MMI_OPC_CLASS_MMI3 = 0x29 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMFHL      = 0x30 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PMTHL      = 0x31 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLH      = 0x34 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLH      = 0x36 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAH      = 0x37 | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSLLW      = 0x3C | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRLW      = 0x3E | MMI_OPC_CLASS_MMI,
    MMI_OPC_PSRAW      = 0x3F | MMI_OPC_CLASS_MMI,
 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
 *
 * +--------+----------------------+--------+--------+
 * |   MMI  |                      |function|  MMI0  |
 * +--------+----------------------+--------+--------+
 *
 *  function  bits  7..6
 *    bits  |   0   |   1   |   2   |   3
 *   10..8  |   00  |   01  |   10  |   11
 *   -------+-------+-------+-------+-------
 *    0 000 | PADDW | PSUBW | PCGTW | PMAXW
 *    1 001 | PADDH | PSUBH | PCGTH | PMAXH
 *    2 010 | PADDB | PSUBB | PCGTB |   *
 *    3 011 |   *   |   *   |   *   |   *
 *    4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
 *    5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
 *    6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
 *    7 111 |   *   |   *   | PEXT5 | PPAC5

#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
    MMI_OPC_0_PADDW  = (0x00 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBW  = (0x01 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTW  = (0x02 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PMAXW  = (0x03 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDH  = (0x04 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBH  = (0x05 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTH  = (0x06 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PMAXH  = (0x07 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDB  = (0x08 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBB  = (0x09 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PCGTB  = (0x0A << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSW = (0x10 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSW = (0x11 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLW = (0x12 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACW  = (0x13 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSH = (0x14 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSH = (0x15 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLH = (0x16 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACH  = (0x17 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PADDSB = (0x18 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PSUBSB = (0x19 << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXTLB = (0x1A << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPACB  = (0x1B << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PEXT5  = (0x1E << 6) | MMI_OPC_CLASS_MMI0,
    MMI_OPC_0_PPAC5  = (0x1F << 6) | MMI_OPC_CLASS_MMI0,
 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
 *
 * +--------+----------------------+--------+--------+
 * |   MMI  |                      |function|  MMI1  |
 * +--------+----------------------+--------+--------+
 *
 *  function  bits  7..6
 *    bits  |   0   |   1   |   2   |   3
 *   10..8  |   00  |   01  |   10  |   11
 *   -------+-------+-------+-------+-------
 *    0 000 |   *   | PABSW | PCEQW | PMINW
 *    1 001 | PADSBH| PABSH | PCEQH | PMINH
 *    2 010 |   *   |   *   | PCEQB |   *
 *    3 011 |   *   |   *   |   *   |   *
 *    4 100 | PADDUW| PSUBUW| PEXTUW|   *
 *    5 101 | PADDUH| PSUBUH| PEXTUH|   *
 *    6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
 *    7 111 |   *   |   *   |   *   |   *

#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
    MMI_OPC_1_PABSW  = (0x01 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQW  = (0x02 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PMINW  = (0x03 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADSBH = (0x04 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PABSH  = (0x05 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQH  = (0x06 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PMINH  = (0x07 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PCEQB  = (0x0A << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUW = (0x10 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUW = (0x11 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUW = (0x12 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUH = (0x14 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUH = (0x15 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUH = (0x16 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PADDUB = (0x18 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PSUBUB = (0x19 << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_PEXTUB = (0x1A << 6) | MMI_OPC_CLASS_MMI1,
    MMI_OPC_1_QFSRV  = (0x1B << 6) | MMI_OPC_CLASS_MMI1,
 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
 *
 * +--------+----------------------+--------+--------+
 * |   MMI  |                      |function|  MMI2  |
 * +--------+----------------------+--------+--------+
 *
 *  function  bits  7..6
 *    bits  |   0   |   1   |   2   |   3
 *   10..8  |   00  |   01  |   10  |   11
 *   -------+-------+-------+-------+-------
 *    0 000 | PMADDW|   *   | PSLLVW| PSRLVW
 *    1 001 | PMSUBW|   *   |   *   |   *
 *    2 010 | PMFHI | PMFLO | PINTH |   *
 *    3 011 | PMULTW| PDIVW | PCPYLD|   *
 *    4 100 | PMADDH| PHMADH|  PAND |  PXOR
 *    5 101 | PMSUBH| PHMSBH|   *   |   *
 *    6 110 |   *   |   *   | PEXEH | PREVH
 *    7 111 | PMULTH| PDIVBW| PEXEW | PROT3W

#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
    MMI_OPC_2_PMADDW = (0x00 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PSLLVW = (0x02 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PSRLVW = (0x03 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMSUBW = (0x04 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMFHI  = (0x08 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMFLO  = (0x09 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PINTH  = (0x0A << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMULTW = (0x0C << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PDIVW  = (0x0D << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PCPYLD = (0x0E << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMADDH = (0x10 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PHMADH = (0x11 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PAND   = (0x12 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PXOR   = (0x13 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMSUBH = (0x14 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PHMSBH = (0x15 << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PEXEH  = (0x1A << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PREVH  = (0x1B << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PMULTH = (0x1C << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PDIVBW = (0x1D << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PEXEW  = (0x1E << 6) | MMI_OPC_CLASS_MMI2,
    MMI_OPC_2_PROT3W = (0x1F << 6) | MMI_OPC_CLASS_MMI2,
 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
 *
 * +--------+----------------------+--------+--------+
 * |   MMI  |                      |function|  MMI3  |
 * +--------+----------------------+--------+--------+
 *
 *  function  bits  7..6
 *    bits  |   0   |   1   |   2   |   3
 *   10..8  |   00  |   01  |   10  |   11
 *   -------+-------+-------+-------+-------
 *    0 000 |PMADDUW|   *   |   *   | PSRAVW
 *    1 001 |   *   |   *   |   *   |   *
 *    2 010 | PMTHI | PMTLO | PINTEH|   *
 *    3 011 |PMULTUW| PDIVUW| PCPYUD|   *
 *    4 100 |   *   |   *   |  POR  |  PNOR
 *    5 101 |   *   |   *   |   *   |   *
 *    6 110 |   *   |   *   | PEXCH | PCPYH
 *    7 111 |   *   |   *   | PEXCW |   *

#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
    MMI_OPC_3_PMADDUW = (0x00 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PSRAVW  = (0x03 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMTHI   = (0x08 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMTLO   = (0x09 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PINTEH  = (0x0A << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PMULTUW = (0x0C << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PDIVUW  = (0x0D << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PCPYUD  = (0x0E << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_POR     = (0x12 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PNOR    = (0x13 << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PEXCH   = (0x1A << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PCPYH   = (0x1B << 6) | MMI_OPC_CLASS_MMI3,
    MMI_OPC_3_PEXCW   = (0x1E << 6) | MMI_OPC_CLASS_MMI3,
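
/*
 * Decoding sketch (hypothetical, for illustration): an MMI0..MMI3
 * sub-instruction is identified by its major opcode together with bits 10..0
 * of the instruction word, so a decoder built on the masks above would
 * plausibly take the form:
 *
 *     switch (MASK_MMI0(ctx->opcode)) {
 *     case MMI_OPC_0_PADDW:
 *         ...
 *         break;
 *     default:
 *         generate_exception_end(ctx, EXCP_RI);
 *         break;
 *     }
 */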
/* global register indices */
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv cpu_lladdr, cpu_llval;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#if defined(TARGET_MIPS64)
/* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
static TCGv_i64 cpu_mmr[32];

#if !defined(TARGET_MIPS64)
static TCGv mxu_gpr[NUMBER_OF_MXU_REGISTERS - 1];
#include "exec/gen-icount.h"

#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
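
/*
 * Expansion sketch (for illustration; "foo" is a placeholder helper name,
 * not one defined by this file): a call such as gen_helper_0e1i(foo, t0, 4)
 * wraps the constant operand in a temporary i32, i.e. it expands to roughly:
 *
 *     TCGv_i32 helper_tmp = tcg_const_i32(4);
 *     gen_helper_foo(cpu_env, t0, helper_tmp);
 *     tcg_temp_free_i32(helper_tmp);
 */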
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong saved_pc;
    target_ulong page_start;
    uint64_t insn_flags;
    int32_t CP0_Config1;
    int32_t CP0_Config2;
    int32_t CP0_Config3;
    int32_t CP0_Config5;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;

#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",

#if !defined(TARGET_MIPS64)
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
#define LOG_DISAS(...)                                                        \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \

#define MIPS_INVAL(op)                                                        \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
        tcg_gen_movi_tl(t, 0);
        tcg_gen_mov_tl(t, cpu_gpr[reg]);

static inline void gen_store_gpr (TCGv t, int reg)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
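
/*
 * Usage sketch (illustrative only): copying one general purpose register
 * into another goes through a TCG temporary and relies on the r0 handling
 * of the two helpers above:
 *
 *     TCGv t0 = tcg_temp_new();
 *     gen_load_gpr(t0, rs);     // rs == 0 yields the constant zero
 *     gen_store_gpr(t0, rd);    // writes to rd == 0 are discarded
 *     tcg_temp_free(t0);
 */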
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
    TCGv t0 = tcg_temp_new();

        tcg_gen_movi_tl(t0, 0);
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    gen_store_gpr(t0, to);

static inline void gen_store_srsgpr (int from, int to)
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);

#if !defined(TARGET_MIPS64)
/* MXU General purpose registers moves. */
static inline void gen_load_mxu_gpr(TCGv t, unsigned int reg)
        tcg_gen_movi_tl(t, 0);
    } else if (reg <= 15) {
        tcg_gen_mov_tl(t, mxu_gpr[reg - 1]);

static inline void gen_store_mxu_gpr(TCGv t, unsigned int reg)
    if (reg > 0 && reg <= 15) {
        tcg_gen_mov_tl(mxu_gpr[reg - 1], t);

/* MXU control register moves. */
static inline void gen_load_mxu_cr(TCGv t)
    tcg_gen_mov_tl(t, mxu_CR);

static inline void gen_store_mxu_cr(TCGv t)
    /* TODO: Add handling of RW rules for MXU_CR. */
    tcg_gen_mov_tl(mxu_CR, t);
static inline void gen_save_pc(target_ulong pc)
    tcg_gen_movi_tl(cpu_PC, pc);

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
        gen_save_pc(ctx->base.pc_next);
        ctx->saved_pc = ctx->base.pc_next;
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
            tcg_gen_movi_tl(btarget, ctx->btarget);

static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        ctx->btarget = env->btarget;

static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->base.is_jmp = DISAS_NORETURN;

static inline void generate_exception(DisasContext *ctx, int excp)
    gen_helper_0e0i(raise_exception, excp);

static inline void generate_exception_end(DisasContext *ctx, int excp)
    generate_exception_err(ctx, excp, 0);
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
        gen_load_fpr32(ctx, t, reg | 1);

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
        gen_store_fpr32(ctx, t, reg | 1);

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
static inline int get_fp_bit (int cc)

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);

static inline void gen_op_addr_addi(DisasContext *ctx, TCGv ret, TCGv base,
    tcg_gen_addi_tl(ret, base, ofs);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {

/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
    tcg_gen_extrl_i64_i32(ret, arg);

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
    tcg_gen_extrh_i64_i32(ret, arg);
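
/*
 * Usage sketch (illustrative; t64 is a hypothetical 64-bit temporary):
 * splitting a 64-bit product into the HI/LO register pair combines the two
 * helpers above:
 *
 *     gen_move_low32(cpu_LO[0], t64);
 *     gen_move_high32(cpu_HI[0], t64);
 */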
static inline void check_cp0_enabled(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);

static inline void check_cp1_enabled(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
static inline void check_cop1x(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);

 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
static inline void check_cp1_registers(DisasContext *ctx, int regs)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);

/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
static inline void check_dsp(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
            generate_exception_end(ctx, EXCP_RI);

static inline void check_dsp_r2(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
            generate_exception_end(ctx, EXCP_RI);

static inline void check_dsp_r3(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R3))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
            generate_exception_end(ctx, EXCP_RI);

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, uint64_t flags)
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, uint64_t flags)
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);

 * The Linux kernel traps certain reserved instruction exceptions to
 * emulate the corresponding instructions. QEMU is the kernel in user
 * mode, so those traps are emulated by accepting the instructions.
 *
 * A reserved instruction exception is generated for flagged CPUs if
 * QEMU runs in system mode.
static inline void check_insn_opc_user_only(DisasContext *ctx, uint64_t flags)
#ifndef CONFIG_USER_ONLY
    check_insn_opc_removed(ctx, flags);

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    check_cp1_64bitmode(ctx);

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);

 * This code generates a "reserved instruction" exception if the
 * Config5 XNP bit is set.
static inline void check_xnp(DisasContext *ctx)
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
        generate_exception_end(ctx, EXCP_RI);

#ifndef CONFIG_USER_ONLY
 * This code generates a "reserved instruction" exception if the
 * Config3 PW bit is NOT set.
static inline void check_pw(DisasContext *ctx)
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_PW)))) {
        generate_exception_end(ctx, EXCP_RI);

 * This code generates a "reserved instruction" exception if the
 * Config3 MT bit is NOT set.
static inline void check_mt(DisasContext *ctx)
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
        generate_exception_end(ctx, EXCP_RI);

#ifndef CONFIG_USER_ONLY
 * This code generates a "coprocessor unusable" exception if CP0 is not
 * available, and, if that is not the case, generates a "reserved instruction"
 * exception if the Config5 MT bit is NOT set. This is needed for availability
 * control of some of MT ASE instructions.
static inline void check_cp0_mt(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_err(ctx, EXCP_CpU, 0);
        if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
            generate_exception_err(ctx, EXCP_RI, 0);

 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set.
static inline void check_nms(DisasContext *ctx)
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS))) {
        generate_exception_end(ctx, EXCP_RI);

 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
 * Config2 TL, and Config5 L2C are unset.
static inline void check_nms_dl_il_sl_tl_l2c(DisasContext *ctx)
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS)) &&
        !(ctx->CP0_Config1 & (1 << CP0C1_DL)) &&
        !(ctx->CP0_Config1 & (1 << CP0C1_IL)) &&
        !(ctx->CP0_Config2 & (1 << CP0C2_SL)) &&
        !(ctx->CP0_Config2 & (1 << CP0C2_TL)) &&
        !(ctx->CP0_Config5 & (1 << CP0C5_L2C)))
        generate_exception_end(ctx, EXCP_RI);

 * This code generates a "reserved instruction" exception if the
 * Config5 EVA bit is NOT set.
static inline void check_eva(DisasContext *ctx)
    if (unlikely(!(ctx->CP0_Config5 & (1 << CP0C5_EVA)))) {
        generate_exception_end(ctx, EXCP_RI);
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs. No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits) \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
                                               int ft, int fs, int cc) \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
        check_cp1_registers(ctx, fs | ft); \
    gen_ldcmp_fpr##bits (ctx, fp0, fs); \
    gen_ldcmp_fpr##bits (ctx, fp1, ft); \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    tcg_temp_free_i##bits (fp0); \
    tcg_temp_free_i##bits (fp1); \

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#define FOP_CONDNS(fmt, ifmt, bits, STORE) \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
                                      int ft, int fs, int fd) \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
    if (ifmt == FMT_D) { \
        check_cp1_registers(ctx, fs | ft | fd); \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
        gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
        gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
    tcg_temp_free_i ## bits (fp0); \
    tcg_temp_free_i ## bits (fp1); \

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname) \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
                                DisasContext *ctx) \
    TCGv t0 = tcg_temp_new(); \
    tcg_gen_mov_tl(t0, arg1); \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
    tcg_temp_free(t0); \

#define OP_LD_ATOMIC(insn,fname) \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
                                DisasContext *ctx) \
    gen_helper_1e1i(insn, ret, arg1, mem_idx); \

OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);

static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int offset)
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
3344 static target_ulong pc_relative_pc (DisasContext *ctx)
3346 target_ulong pc = ctx->base.pc_next;
3348 if (ctx->hflags & MIPS_HFLAG_BMASK) {
3349 int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;
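/* When translating inside a branch delay slot, the PC-relative base is
   the address of the branch itself, so branch_bytes (2 for a 16-bit
   branch, otherwise 4) is how far pc has to be stepped back before the
   word alignment applied below. */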
3354 pc &= ~(target_ulong)3;
3359 static void gen_ld(DisasContext *ctx, uint32_t opc,
3360 int rt, int base, int offset)
3363 int mem_idx = ctx->mem_idx;
3365 if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
3366 /* Loongson CPUs use a load to the zero register for prefetch.
3367 We emulate it as a NOP. On other CPUs we must perform the
3368 actual memory access. */
3372 t0 = tcg_temp_new();
3373 gen_base_offset_addr(ctx, t0, base, offset);
3376 #if defined(TARGET_MIPS64)
3378 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEUL |
3379 ctx->default_tcg_memop_mask);
3380 gen_store_gpr(t0, rt);
3383 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ |
3384 ctx->default_tcg_memop_mask);
3385 gen_store_gpr(t0, rt);
3389 op_ld_lld(t0, t0, mem_idx, ctx);
3390 gen_store_gpr(t0, rt);
3393 t1 = tcg_temp_new();
3394 /* Do a byte access to possibly trigger a page
3395 fault with the unaligned address. */
3396 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
3397 tcg_gen_andi_tl(t1, t0, 7);
3398 #ifndef TARGET_WORDS_BIGENDIAN
3399 tcg_gen_xori_tl(t1, t1, 7);
3401 tcg_gen_shli_tl(t1, t1, 3);
3402 tcg_gen_andi_tl(t0, t0, ~7);
3403 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ);
3404 tcg_gen_shl_tl(t0, t0, t1);
3405 t2 = tcg_const_tl(-1);
3406 tcg_gen_shl_tl(t2, t2, t1);
3407 gen_load_gpr(t1, rt);
3408 tcg_gen_andc_tl(t1, t1, t2);
3410 tcg_gen_or_tl(t0, t0, t1);
3412 gen_store_gpr(t0, rt);
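/* Taken together, the LDL sequence above merges unaligned data: the
   aligned doubleword is shifted left so the bytes at and above the
   unaligned address land in the upper end of the register, -1 << shift
   selects exactly those bytes, and the remaining low-order bytes are
   kept from the old value of rt before the two parts are OR-ed. */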
3415 t1 = tcg_temp_new();
3416 /* Do a byte access to possibly trigger a page
3417 fault with the unaligned address. */
3418 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
3419 tcg_gen_andi_tl(t1, t0, 7);
3420 #ifdef TARGET_WORDS_BIGENDIAN
3421 tcg_gen_xori_tl(t1, t1, 7);
3423 tcg_gen_shli_tl(t1, t1, 3);
3424 tcg_gen_andi_tl(t0, t0, ~7);
3425 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ);
3426 tcg_gen_shr_tl(t0, t0, t1);
3427 tcg_gen_xori_tl(t1, t1, 63);
3428 t2 = tcg_const_tl(0xfffffffffffffffeull);
3429 tcg_gen_shl_tl(t2, t2, t1);
3430 gen_load_gpr(t1, rt);
3431 tcg_gen_and_tl(t1, t1, t2);
3433 tcg_gen_or_tl(t0, t0, t1);
3435 gen_store_gpr(t0, rt);
3438 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3439 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3441 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3442 gen_store_gpr(t0
, rt
);
3446 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3447 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3449 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3450 gen_store_gpr(t0
, rt
);
3453 mem_idx
= MIPS_HFLAG_UM
;
3456 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3457 ctx
->default_tcg_memop_mask
);
3458 gen_store_gpr(t0
, rt
);
3461 mem_idx
= MIPS_HFLAG_UM
;
3464 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3465 ctx
->default_tcg_memop_mask
);
3466 gen_store_gpr(t0
, rt
);
3469 mem_idx
= MIPS_HFLAG_UM
;
3472 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3473 ctx
->default_tcg_memop_mask
);
3474 gen_store_gpr(t0
, rt
);
3477 mem_idx
= MIPS_HFLAG_UM
;
3480 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3481 gen_store_gpr(t0
, rt
);
3484 mem_idx
= MIPS_HFLAG_UM
;
3487 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3488 gen_store_gpr(t0
, rt
);
3491 mem_idx
= MIPS_HFLAG_UM
;
3494 t1
= tcg_temp_new();
3495 /* Do a byte access to possibly trigger a page
3496 fault with the unaligned address. */
3497 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3498 tcg_gen_andi_tl(t1
, t0
, 3);
3499 #ifndef TARGET_WORDS_BIGENDIAN
3500 tcg_gen_xori_tl(t1
, t1
, 3);
3502 tcg_gen_shli_tl(t1
, t1
, 3);
3503 tcg_gen_andi_tl(t0
, t0
, ~3);
3504 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3505 tcg_gen_shl_tl(t0
, t0
, t1
);
3506 t2
= tcg_const_tl(-1);
3507 tcg_gen_shl_tl(t2
, t2
, t1
);
3508 gen_load_gpr(t1
, rt
);
3509 tcg_gen_andc_tl(t1
, t1
, t2
);
3511 tcg_gen_or_tl(t0
, t0
, t1
);
3513 tcg_gen_ext32s_tl(t0
, t0
);
3514 gen_store_gpr(t0
, rt
);
3517 mem_idx
= MIPS_HFLAG_UM
;
3520 t1
= tcg_temp_new();
3521 /* Do a byte access to possibly trigger a page
3522 fault with the unaligned address. */
3523 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3524 tcg_gen_andi_tl(t1
, t0
, 3);
3525 #ifdef TARGET_WORDS_BIGENDIAN
3526 tcg_gen_xori_tl(t1
, t1
, 3);
3528 tcg_gen_shli_tl(t1
, t1
, 3);
3529 tcg_gen_andi_tl(t0
, t0
, ~3);
3530 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3531 tcg_gen_shr_tl(t0
, t0
, t1
);
3532 tcg_gen_xori_tl(t1
, t1
, 31);
3533 t2
= tcg_const_tl(0xfffffffeull
);
3534 tcg_gen_shl_tl(t2
, t2
, t1
);
3535 gen_load_gpr(t1
, rt
);
3536 tcg_gen_and_tl(t1
, t1
, t2
);
3538 tcg_gen_or_tl(t0
, t0
, t1
);
3540 tcg_gen_ext32s_tl(t0
, t0
);
3541 gen_store_gpr(t0
, rt
);
3544 mem_idx
= MIPS_HFLAG_UM
;
3548 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3549 gen_store_gpr(t0
, rt
);
3555 static void gen_llwp(DisasContext *ctx, uint32_t base, int16_t offset,
3556 uint32_t reg1, uint32_t reg2)
3558 TCGv taddr = tcg_temp_new();
3559 TCGv_i64 tval = tcg_temp_new_i64();
3560 TCGv tmp1 = tcg_temp_new();
3561 TCGv tmp2 = tcg_temp_new();
3563 gen_base_offset_addr(ctx, taddr, base, offset);
3564 tcg_gen_qemu_ld64(tval, taddr, ctx->mem_idx);
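/* The paired words are fetched with a single atomic 64-bit load; the
   extractions below pick which half goes to reg1 and which to reg2
   according to the target byte order, and the raw 64-bit value and the
   address are then recorded for a later SCWP to check against. */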
3565 #ifdef TARGET_WORDS_BIGENDIAN
3566 tcg_gen_extr_i64_tl(tmp2, tmp1, tval);
3568 tcg_gen_extr_i64_tl(tmp1, tmp2, tval);
3570 gen_store_gpr(tmp1, reg1);
3571 tcg_temp_free(tmp1);
3572 gen_store_gpr(tmp2, reg2);
3573 tcg_temp_free(tmp2);
3574 tcg_gen_st_i64(tval, cpu_env, offsetof(CPUMIPSState, llval_wp));
3575 tcg_temp_free_i64(tval);
3576 tcg_gen_st_tl(taddr, cpu_env, offsetof(CPUMIPSState, lladdr));
3577 tcg_temp_free(taddr);
3581 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3582 int base
, int offset
)
3584 TCGv t0
= tcg_temp_new();
3585 TCGv t1
= tcg_temp_new();
3586 int mem_idx
= ctx
->mem_idx
;
3588 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3589 gen_load_gpr(t1
, rt
);
3591 #if defined(TARGET_MIPS64)
3593 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3594 ctx
->default_tcg_memop_mask
);
3597 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3600 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3604 mem_idx
= MIPS_HFLAG_UM
;
3607 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3608 ctx
->default_tcg_memop_mask
);
3611 mem_idx
= MIPS_HFLAG_UM
;
3614 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3615 ctx
->default_tcg_memop_mask
);
3618 mem_idx
= MIPS_HFLAG_UM
;
3621 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3624 mem_idx
= MIPS_HFLAG_UM
;
3627 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3630 mem_idx
= MIPS_HFLAG_UM
;
3633 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3641 /* Store conditional */
3642 static void gen_st_cond(DisasContext *ctx, int rt, int base, int offset,
3643 TCGMemOp tcg_mo, bool eva)
3646 TCGLabel *l1 = gen_new_label();
3647 TCGLabel *done = gen_new_label();
3649 t0 = tcg_temp_new();
3650 addr = tcg_temp_new();
3651 /* compare the address against that of the preceding LL */
3652 gen_base_offset_addr(ctx, addr, base, offset);
3653 tcg_gen_brcond_tl(TCG_COND_EQ, addr, cpu_lladdr, l1);
3654 tcg_temp_free(addr);
3655 tcg_gen_movi_tl(t0, 0);
3656 gen_store_gpr(t0, rt);
3660 /* generate cmpxchg */
3661 val = tcg_temp_new();
3662 gen_load_gpr(val, rt);
3663 tcg_gen_atomic_cmpxchg_tl(t0, cpu_lladdr, cpu_llval, val,
3664 eva ? MIPS_HFLAG_UM : ctx->mem_idx, tcg_mo);
3665 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_llval);
3666 gen_store_gpr(t0, rt);
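/* rt ends up holding 1 when the cmpxchg found memory still equal to
   llval (the conditional store succeeded) and 0 otherwise; the
   address-mismatch path above already stored 0 without touching memory. */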
3669 gen_set_label(done);
3674 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3675 uint32_t reg1
, uint32_t reg2
, bool eva
)
3677 TCGv taddr
= tcg_temp_local_new();
3678 TCGv lladdr
= tcg_temp_local_new();
3679 TCGv_i64 tval
= tcg_temp_new_i64();
3680 TCGv_i64 llval
= tcg_temp_new_i64();
3681 TCGv_i64 val
= tcg_temp_new_i64();
3682 TCGv tmp1
= tcg_temp_new();
3683 TCGv tmp2
= tcg_temp_new();
3684 TCGLabel
*lab_fail
= gen_new_label();
3685 TCGLabel
*lab_done
= gen_new_label();
3687 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3689 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3690 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3692 gen_load_gpr(tmp1
, reg1
);
3693 gen_load_gpr(tmp2
, reg2
);
3695 #ifdef TARGET_WORDS_BIGENDIAN
3696 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3698 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3701 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3702 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3703 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3705 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3707 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
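/* reg1 is optimistically set to 1; if the cmpxchg did not find llval_wp
   still in memory, the branch falls through to lab_fail below, which
   overwrites reg1 with 0. */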
3709 gen_set_label(lab_fail
);
3712 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3714 gen_set_label(lab_done
);
3715 tcg_gen_movi_tl(lladdr
, -1);
3716 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3719 /* Load and store */
3720 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3723 /* Don't do NOP if destination is zero: we must perform the actual
3728 TCGv_i32 fp0
= tcg_temp_new_i32();
3729 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3730 ctx
->default_tcg_memop_mask
);
3731 gen_store_fpr32(ctx
, fp0
, ft
);
3732 tcg_temp_free_i32(fp0
);
3737 TCGv_i32 fp0
= tcg_temp_new_i32();
3738 gen_load_fpr32(ctx
, fp0
, ft
);
3739 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3740 ctx
->default_tcg_memop_mask
);
3741 tcg_temp_free_i32(fp0
);
3746 TCGv_i64 fp0
= tcg_temp_new_i64();
3747 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3748 ctx
->default_tcg_memop_mask
);
3749 gen_store_fpr64(ctx
, fp0
, ft
);
3750 tcg_temp_free_i64(fp0
);
3755 TCGv_i64 fp0
= tcg_temp_new_i64();
3756 gen_load_fpr64(ctx
, fp0
, ft
);
3757 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3758 ctx
->default_tcg_memop_mask
);
3759 tcg_temp_free_i64(fp0
);
3763 MIPS_INVAL("flt_ldst");
3764 generate_exception_end(ctx
, EXCP_RI
);
3769 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3770 int rs
, int16_t imm
)
3772 TCGv t0
= tcg_temp_new();
3774 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3775 check_cp1_enabled(ctx
);
3779 check_insn(ctx
, ISA_MIPS2
);
3782 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3783 gen_flt_ldst(ctx
, op
, rt
, t0
);
3786 generate_exception_err(ctx
, EXCP_CpU
, 1);
3791 /* Arithmetic with immediate operand */
3792 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3793 int rt
, int rs
, int imm
)
3795 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3797 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3798 /* If no destination, treat it as a NOP.
3799 For addi, we must generate the overflow exception when needed. */
3805 TCGv t0
= tcg_temp_local_new();
3806 TCGv t1
= tcg_temp_new();
3807 TCGv t2
= tcg_temp_new();
3808 TCGLabel
*l1
= gen_new_label();
3810 gen_load_gpr(t1
, rs
);
3811 tcg_gen_addi_tl(t0
, t1
, uimm
);
3812 tcg_gen_ext32s_tl(t0
, t0
);
3814 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3815 tcg_gen_xori_tl(t2
, t0
, uimm
);
3816 tcg_gen_and_tl(t1
, t1
, t2
);
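/* After the two XORs, the sign bit of t1 is set exactly when rs and the
   immediate have the same sign while the sign of the result differs,
   i.e. precisely the signed-overflow case tested by the branch below. */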
3818 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3820 /* operands of same sign, result different sign */
3821 generate_exception(ctx
, EXCP_OVERFLOW
);
3823 tcg_gen_ext32s_tl(t0
, t0
);
3824 gen_store_gpr(t0
, rt
);
3830 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3831 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3833 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3836 #if defined(TARGET_MIPS64)
3839 TCGv t0
= tcg_temp_local_new();
3840 TCGv t1
= tcg_temp_new();
3841 TCGv t2
= tcg_temp_new();
3842 TCGLabel
*l1
= gen_new_label();
3844 gen_load_gpr(t1
, rs
);
3845 tcg_gen_addi_tl(t0
, t1
, uimm
);
3847 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3848 tcg_gen_xori_tl(t2
, t0
, uimm
);
3849 tcg_gen_and_tl(t1
, t1
, t2
);
3851 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3853 /* operands of same sign, result different sign */
3854 generate_exception(ctx
, EXCP_OVERFLOW
);
3856 gen_store_gpr(t0
, rt
);
3862 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3864 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3871 /* Logic with immediate operand */
3872 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3873 int rt
, int rs
, int16_t imm
)
3878 /* If no destination, treat it as a NOP. */
3881 uimm
= (uint16_t)imm
;
3884 if (likely(rs
!= 0))
3885 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3887 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3891 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3893 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3896 if (likely(rs
!= 0))
3897 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3899 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3902 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3904 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3905 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3907 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3916 /* Set on less than with immediate operand */
3917 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3918 int rt
, int rs
, int16_t imm
)
3920 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3924 /* If no destination, treat it as a NOP. */
3927 t0
= tcg_temp_new();
3928 gen_load_gpr(t0
, rs
);
3931 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3934 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3940 /* Shifts with immediate operand */
3941 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3942 int rt
, int rs
, int16_t imm
)
3944 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3948 /* If no destination, treat it as a NOP. */
3952 t0
= tcg_temp_new();
3953 gen_load_gpr(t0
, rs
);
3956 tcg_gen_shli_tl(t0
, t0
, uimm
);
3957 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3960 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3964 tcg_gen_ext32u_tl(t0
, t0
);
3965 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3967 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3972 TCGv_i32 t1
= tcg_temp_new_i32();
3974 tcg_gen_trunc_tl_i32(t1
, t0
);
3975 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3976 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3977 tcg_temp_free_i32(t1
);
3979 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3982 #if defined(TARGET_MIPS64)
3984 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3987 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3990 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3994 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3996 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4000 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4003 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4006 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4009 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4017 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4018 int rd
, int rs
, int rt
)
4020 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4021 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4022 /* If no destination, treat it as a NOP.
4023 For add & sub, we must generate the overflow exception when needed. */
4030 TCGv t0
= tcg_temp_local_new();
4031 TCGv t1
= tcg_temp_new();
4032 TCGv t2
= tcg_temp_new();
4033 TCGLabel
*l1
= gen_new_label();
4035 gen_load_gpr(t1
, rs
);
4036 gen_load_gpr(t2
, rt
);
4037 tcg_gen_add_tl(t0
, t1
, t2
);
4038 tcg_gen_ext32s_tl(t0
, t0
);
4039 tcg_gen_xor_tl(t1
, t1
, t2
);
4040 tcg_gen_xor_tl(t2
, t0
, t2
);
4041 tcg_gen_andc_tl(t1
, t2
, t1
);
4043 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4045 /* operands of same sign, result different sign */
4046 generate_exception(ctx
, EXCP_OVERFLOW
);
4048 gen_store_gpr(t0
, rd
);
4053 if (rs
!= 0 && rt
!= 0) {
4054 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4055 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4056 } else if (rs
== 0 && rt
!= 0) {
4057 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4058 } else if (rs
!= 0 && rt
== 0) {
4059 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4061 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4066 TCGv t0
= tcg_temp_local_new();
4067 TCGv t1
= tcg_temp_new();
4068 TCGv t2
= tcg_temp_new();
4069 TCGLabel
*l1
= gen_new_label();
4071 gen_load_gpr(t1
, rs
);
4072 gen_load_gpr(t2
, rt
);
4073 tcg_gen_sub_tl(t0
, t1
, t2
);
4074 tcg_gen_ext32s_tl(t0
, t0
);
4075 tcg_gen_xor_tl(t2
, t1
, t2
);
4076 tcg_gen_xor_tl(t1
, t0
, t1
);
4077 tcg_gen_and_tl(t1
, t1
, t2
);
4079 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4081 /* operands of different sign, first operand and result different sign */
4082 generate_exception(ctx
, EXCP_OVERFLOW
);
4084 gen_store_gpr(t0
, rd
);
4089 if (rs
!= 0 && rt
!= 0) {
4090 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4091 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4092 } else if (rs
== 0 && rt
!= 0) {
4093 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4094 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4095 } else if (rs
!= 0 && rt
== 0) {
4096 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4098 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4101 #if defined(TARGET_MIPS64)
4104 TCGv t0
= tcg_temp_local_new();
4105 TCGv t1
= tcg_temp_new();
4106 TCGv t2
= tcg_temp_new();
4107 TCGLabel
*l1
= gen_new_label();
4109 gen_load_gpr(t1
, rs
);
4110 gen_load_gpr(t2
, rt
);
4111 tcg_gen_add_tl(t0
, t1
, t2
);
4112 tcg_gen_xor_tl(t1
, t1
, t2
);
4113 tcg_gen_xor_tl(t2
, t0
, t2
);
4114 tcg_gen_andc_tl(t1
, t2
, t1
);
4116 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4118 /* operands of same sign, result different sign */
4119 generate_exception(ctx
, EXCP_OVERFLOW
);
4121 gen_store_gpr(t0
, rd
);
4126 if (rs
!= 0 && rt
!= 0) {
4127 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4128 } else if (rs
== 0 && rt
!= 0) {
4129 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4130 } else if (rs
!= 0 && rt
== 0) {
4131 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4133 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4138 TCGv t0
= tcg_temp_local_new();
4139 TCGv t1
= tcg_temp_new();
4140 TCGv t2
= tcg_temp_new();
4141 TCGLabel
*l1
= gen_new_label();
4143 gen_load_gpr(t1
, rs
);
4144 gen_load_gpr(t2
, rt
);
4145 tcg_gen_sub_tl(t0
, t1
, t2
);
4146 tcg_gen_xor_tl(t2
, t1
, t2
);
4147 tcg_gen_xor_tl(t1
, t0
, t1
);
4148 tcg_gen_and_tl(t1
, t1
, t2
);
4150 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4152 /* operands of different sign, first operand and result different sign */
4153 generate_exception(ctx
, EXCP_OVERFLOW
);
4155 gen_store_gpr(t0
, rd
);
4160 if (rs
!= 0 && rt
!= 0) {
4161 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4162 } else if (rs
== 0 && rt
!= 0) {
4163 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4164 } else if (rs
!= 0 && rt
== 0) {
4165 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4167 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4172 if (likely(rs
!= 0 && rt
!= 0)) {
4173 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4174 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4176 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4182 /* Conditional move */
4183 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4184 int rd
, int rs
, int rt
)
4189 /* If no destination, treat it as a NOP. */
4193 t0
= tcg_temp_new();
4194 gen_load_gpr(t0
, rt
);
4195 t1
= tcg_const_tl(0);
4196 t2
= tcg_temp_new();
4197 gen_load_gpr(t2
, rs
);
4200 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4203 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4206 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4209 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4218 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4219 int rd
, int rs
, int rt
)
4222 /* If no destination, treat it as a NOP. */
4228 if (likely(rs
!= 0 && rt
!= 0)) {
4229 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4231 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4235 if (rs
!= 0 && rt
!= 0) {
4236 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4237 } else if (rs
== 0 && rt
!= 0) {
4238 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4239 } else if (rs
!= 0 && rt
== 0) {
4240 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4242 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4246 if (likely(rs
!= 0 && rt
!= 0)) {
4247 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4248 } else if (rs
== 0 && rt
!= 0) {
4249 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4250 } else if (rs
!= 0 && rt
== 0) {
4251 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4253 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4257 if (likely(rs
!= 0 && rt
!= 0)) {
4258 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4259 } else if (rs
== 0 && rt
!= 0) {
4260 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4261 } else if (rs
!= 0 && rt
== 0) {
4262 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4264 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4270 /* Set on lower than */
4271 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4272 int rd
, int rs
, int rt
)
4277 /* If no destination, treat it as a NOP. */
4281 t0
= tcg_temp_new();
4282 t1
= tcg_temp_new();
4283 gen_load_gpr(t0
, rs
);
4284 gen_load_gpr(t1
, rt
);
4287 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4290 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4298 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4299 int rd
, int rs
, int rt
)
4304 /* If no destination, treat it as a NOP.
4305 For add & sub, we must generate the overflow exception when needed. */
4309 t0
= tcg_temp_new();
4310 t1
= tcg_temp_new();
4311 gen_load_gpr(t0
, rs
);
4312 gen_load_gpr(t1
, rt
);
4315 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4316 tcg_gen_shl_tl(t0
, t1
, t0
);
4317 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4320 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4321 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4324 tcg_gen_ext32u_tl(t1
, t1
);
4325 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4326 tcg_gen_shr_tl(t0
, t1
, t0
);
4327 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4331 TCGv_i32 t2
= tcg_temp_new_i32();
4332 TCGv_i32 t3
= tcg_temp_new_i32();
4334 tcg_gen_trunc_tl_i32(t2
, t0
);
4335 tcg_gen_trunc_tl_i32(t3
, t1
);
4336 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4337 tcg_gen_rotr_i32(t2
, t3
, t2
);
4338 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4339 tcg_temp_free_i32(t2
);
4340 tcg_temp_free_i32(t3
);
4343 #if defined(TARGET_MIPS64)
4345 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4346 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4349 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4350 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4353 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4354 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4357 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4358 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4366 #if defined(TARGET_MIPS64)
4367 /* Copy GPR to and from TX79 HI1/LO1 register. */
4368 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4370 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4377 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4380 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4384 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4386 tcg_gen_movi_tl(cpu_HI
[1], 0);
4391 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4393 tcg_gen_movi_tl(cpu_LO
[1], 0);
4397 MIPS_INVAL("mfthilo1 TX79");
4398 generate_exception_end(ctx
, EXCP_RI
);
4404 /* Arithmetic on HI/LO registers */
4405 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4407 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4418 #if defined(TARGET_MIPS64)
4420 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4424 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4428 #if defined(TARGET_MIPS64)
4430 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4434 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4439 #if defined(TARGET_MIPS64)
4441 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4445 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4448 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4453 #if defined(TARGET_MIPS64)
4455 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4459 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4462 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4468 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4471 TCGv t0
= tcg_const_tl(addr
);
4472 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4473 gen_store_gpr(t0
, reg
);
4477 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4483 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4486 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4487 addr
= addr_add(ctx
, pc
, offset
);
4488 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4492 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4493 addr
= addr_add(ctx
, pc
, offset
);
4494 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4496 #if defined(TARGET_MIPS64)
4499 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4500 addr
= addr_add(ctx
, pc
, offset
);
4501 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4505 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4508 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4509 addr
= addr_add(ctx
, pc
, offset
);
4510 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4515 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4516 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4517 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4520 #if defined(TARGET_MIPS64)
4521 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4522 case R6_OPC_LDPC
+ (1 << 16):
4523 case R6_OPC_LDPC
+ (2 << 16):
4524 case R6_OPC_LDPC
+ (3 << 16):
4526 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4527 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4528 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4532 MIPS_INVAL("OPC_PCREL");
4533 generate_exception_end(ctx
, EXCP_RI
);
4540 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4549 t0
= tcg_temp_new();
4550 t1
= tcg_temp_new();
4552 gen_load_gpr(t0
, rs
);
4553 gen_load_gpr(t1
, rt
);
4558 TCGv t2
= tcg_temp_new();
4559 TCGv t3
= tcg_temp_new();
4560 tcg_gen_ext32s_tl(t0
, t0
);
4561 tcg_gen_ext32s_tl(t1
, t1
);
4562 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4563 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4564 tcg_gen_and_tl(t2
, t2
, t3
);
4565 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4566 tcg_gen_or_tl(t2
, t2
, t3
);
4567 tcg_gen_movi_tl(t3
, 0);
4568 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
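/* The movcond above forces the divisor to 1 whenever it is zero or the
   operation would be INT_MIN / -1; both cases are UNPREDICTABLE on MIPS,
   so substituting 1 keeps the host division below from trapping. */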
4569 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4570 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4577 TCGv t2
= tcg_temp_new();
4578 TCGv t3
= tcg_temp_new();
4579 tcg_gen_ext32s_tl(t0
, t0
);
4580 tcg_gen_ext32s_tl(t1
, t1
);
4581 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4582 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4583 tcg_gen_and_tl(t2
, t2
, t3
);
4584 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4585 tcg_gen_or_tl(t2
, t2
, t3
);
4586 tcg_gen_movi_tl(t3
, 0);
4587 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4588 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4589 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4596 TCGv t2
= tcg_const_tl(0);
4597 TCGv t3
= tcg_const_tl(1);
4598 tcg_gen_ext32u_tl(t0
, t0
);
4599 tcg_gen_ext32u_tl(t1
, t1
);
4600 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4601 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4602 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4609 TCGv t2
= tcg_const_tl(0);
4610 TCGv t3
= tcg_const_tl(1);
4611 tcg_gen_ext32u_tl(t0
, t0
);
4612 tcg_gen_ext32u_tl(t1
, t1
);
4613 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4614 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4615 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4622 TCGv_i32 t2
= tcg_temp_new_i32();
4623 TCGv_i32 t3
= tcg_temp_new_i32();
4624 tcg_gen_trunc_tl_i32(t2
, t0
);
4625 tcg_gen_trunc_tl_i32(t3
, t1
);
4626 tcg_gen_mul_i32(t2
, t2
, t3
);
4627 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4628 tcg_temp_free_i32(t2
);
4629 tcg_temp_free_i32(t3
);
4634 TCGv_i32 t2
= tcg_temp_new_i32();
4635 TCGv_i32 t3
= tcg_temp_new_i32();
4636 tcg_gen_trunc_tl_i32(t2
, t0
);
4637 tcg_gen_trunc_tl_i32(t3
, t1
);
4638 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4639 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4640 tcg_temp_free_i32(t2
);
4641 tcg_temp_free_i32(t3
);
4646 TCGv_i32 t2
= tcg_temp_new_i32();
4647 TCGv_i32 t3
= tcg_temp_new_i32();
4648 tcg_gen_trunc_tl_i32(t2
, t0
);
4649 tcg_gen_trunc_tl_i32(t3
, t1
);
4650 tcg_gen_mul_i32(t2
, t2
, t3
);
4651 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4652 tcg_temp_free_i32(t2
);
4653 tcg_temp_free_i32(t3
);
4658 TCGv_i32 t2
= tcg_temp_new_i32();
4659 TCGv_i32 t3
= tcg_temp_new_i32();
4660 tcg_gen_trunc_tl_i32(t2
, t0
);
4661 tcg_gen_trunc_tl_i32(t3
, t1
);
4662 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4663 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4664 tcg_temp_free_i32(t2
);
4665 tcg_temp_free_i32(t3
);
4668 #if defined(TARGET_MIPS64)
4671 TCGv t2
= tcg_temp_new();
4672 TCGv t3
= tcg_temp_new();
4673 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4674 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4675 tcg_gen_and_tl(t2
, t2
, t3
);
4676 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4677 tcg_gen_or_tl(t2
, t2
, t3
);
4678 tcg_gen_movi_tl(t3
, 0);
4679 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4680 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4687 TCGv t2
= tcg_temp_new();
4688 TCGv t3
= tcg_temp_new();
4689 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4690 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4691 tcg_gen_and_tl(t2
, t2
, t3
);
4692 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4693 tcg_gen_or_tl(t2
, t2
, t3
);
4694 tcg_gen_movi_tl(t3
, 0);
4695 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4696 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4703 TCGv t2
= tcg_const_tl(0);
4704 TCGv t3
= tcg_const_tl(1);
4705 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4706 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4713 TCGv t2
= tcg_const_tl(0);
4714 TCGv t3
= tcg_const_tl(1);
4715 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4716 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4722 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4726 TCGv t2
= tcg_temp_new();
4727 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4732 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4736 TCGv t2
= tcg_temp_new();
4737 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4743 MIPS_INVAL("r6 mul/div");
4744 generate_exception_end(ctx
, EXCP_RI
);
4752 #if defined(TARGET_MIPS64)
4753 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4757 t0
= tcg_temp_new();
4758 t1
= tcg_temp_new();
4760 gen_load_gpr(t0
, rs
);
4761 gen_load_gpr(t1
, rt
);
4766 TCGv t2
= tcg_temp_new();
4767 TCGv t3
= tcg_temp_new();
4768 tcg_gen_ext32s_tl(t0
, t0
);
4769 tcg_gen_ext32s_tl(t1
, t1
);
4770 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4771 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4772 tcg_gen_and_tl(t2
, t2
, t3
);
4773 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4774 tcg_gen_or_tl(t2
, t2
, t3
);
4775 tcg_gen_movi_tl(t3
, 0);
4776 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4777 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4778 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4779 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4780 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4787 TCGv t2
= tcg_const_tl(0);
4788 TCGv t3
= tcg_const_tl(1);
4789 tcg_gen_ext32u_tl(t0
, t0
);
4790 tcg_gen_ext32u_tl(t1
, t1
);
4791 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4792 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4793 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4794 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4795 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4801 MIPS_INVAL("div1 TX79");
4802 generate_exception_end(ctx
, EXCP_RI
);
4811 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4812 int acc
, int rs
, int rt
)
4816 t0
= tcg_temp_new();
4817 t1
= tcg_temp_new();
4819 gen_load_gpr(t0
, rs
);
4820 gen_load_gpr(t1
, rt
);
4829 TCGv t2
= tcg_temp_new();
4830 TCGv t3
= tcg_temp_new();
4831 tcg_gen_ext32s_tl(t0
, t0
);
4832 tcg_gen_ext32s_tl(t1
, t1
);
4833 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4834 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4835 tcg_gen_and_tl(t2
, t2
, t3
);
4836 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4837 tcg_gen_or_tl(t2
, t2
, t3
);
4838 tcg_gen_movi_tl(t3
, 0);
4839 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4840 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4841 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4842 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4843 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4850 TCGv t2
= tcg_const_tl(0);
4851 TCGv t3
= tcg_const_tl(1);
4852 tcg_gen_ext32u_tl(t0
, t0
);
4853 tcg_gen_ext32u_tl(t1
, t1
);
4854 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4855 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4856 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4857 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4858 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4865 TCGv_i32 t2
= tcg_temp_new_i32();
4866 TCGv_i32 t3
= tcg_temp_new_i32();
4867 tcg_gen_trunc_tl_i32(t2
, t0
);
4868 tcg_gen_trunc_tl_i32(t3
, t1
);
4869 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4870 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4871 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4872 tcg_temp_free_i32(t2
);
4873 tcg_temp_free_i32(t3
);
4878 TCGv_i32 t2
= tcg_temp_new_i32();
4879 TCGv_i32 t3
= tcg_temp_new_i32();
4880 tcg_gen_trunc_tl_i32(t2
, t0
);
4881 tcg_gen_trunc_tl_i32(t3
, t1
);
4882 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4883 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4884 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4885 tcg_temp_free_i32(t2
);
4886 tcg_temp_free_i32(t3
);
4889 #if defined(TARGET_MIPS64)
4892 TCGv t2
= tcg_temp_new();
4893 TCGv t3
= tcg_temp_new();
4894 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4895 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4896 tcg_gen_and_tl(t2
, t2
, t3
);
4897 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4898 tcg_gen_or_tl(t2
, t2
, t3
);
4899 tcg_gen_movi_tl(t3
, 0);
4900 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4901 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4902 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4909 TCGv t2
= tcg_const_tl(0);
4910 TCGv t3
= tcg_const_tl(1);
4911 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4912 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4913 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4919 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4922 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4927 TCGv_i64 t2
= tcg_temp_new_i64();
4928 TCGv_i64 t3
= tcg_temp_new_i64();
4930 tcg_gen_ext_tl_i64(t2
, t0
);
4931 tcg_gen_ext_tl_i64(t3
, t1
);
4932 tcg_gen_mul_i64(t2
, t2
, t3
);
4933 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4934 tcg_gen_add_i64(t2
, t2
, t3
);
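/* (HI,LO) was reassembled into a single 64-bit value with LO in the low
   half (tcg_gen_concat_tl_i64), the sign-extended 32x32 product is
   accumulated into it, and the sum is split back into LO and HI below. */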
4935 tcg_temp_free_i64(t3
);
4936 gen_move_low32(cpu_LO
[acc
], t2
);
4937 gen_move_high32(cpu_HI
[acc
], t2
);
4938 tcg_temp_free_i64(t2
);
4943 TCGv_i64 t2
= tcg_temp_new_i64();
4944 TCGv_i64 t3
= tcg_temp_new_i64();
4946 tcg_gen_ext32u_tl(t0
, t0
);
4947 tcg_gen_ext32u_tl(t1
, t1
);
4948 tcg_gen_extu_tl_i64(t2
, t0
);
4949 tcg_gen_extu_tl_i64(t3
, t1
);
4950 tcg_gen_mul_i64(t2
, t2
, t3
);
4951 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4952 tcg_gen_add_i64(t2
, t2
, t3
);
4953 tcg_temp_free_i64(t3
);
4954 gen_move_low32(cpu_LO
[acc
], t2
);
4955 gen_move_high32(cpu_HI
[acc
], t2
);
4956 tcg_temp_free_i64(t2
);
4961 TCGv_i64 t2
= tcg_temp_new_i64();
4962 TCGv_i64 t3
= tcg_temp_new_i64();
4964 tcg_gen_ext_tl_i64(t2
, t0
);
4965 tcg_gen_ext_tl_i64(t3
, t1
);
4966 tcg_gen_mul_i64(t2
, t2
, t3
);
4967 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4968 tcg_gen_sub_i64(t2
, t3
, t2
);
4969 tcg_temp_free_i64(t3
);
4970 gen_move_low32(cpu_LO
[acc
], t2
);
4971 gen_move_high32(cpu_HI
[acc
], t2
);
4972 tcg_temp_free_i64(t2
);
4977 TCGv_i64 t2
= tcg_temp_new_i64();
4978 TCGv_i64 t3
= tcg_temp_new_i64();
4980 tcg_gen_ext32u_tl(t0
, t0
);
4981 tcg_gen_ext32u_tl(t1
, t1
);
4982 tcg_gen_extu_tl_i64(t2
, t0
);
4983 tcg_gen_extu_tl_i64(t3
, t1
);
4984 tcg_gen_mul_i64(t2
, t2
, t3
);
4985 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4986 tcg_gen_sub_i64(t2
, t3
, t2
);
4987 tcg_temp_free_i64(t3
);
4988 gen_move_low32(cpu_LO
[acc
], t2
);
4989 gen_move_high32(cpu_HI
[acc
], t2
);
4990 tcg_temp_free_i64(t2
);
4994 MIPS_INVAL("mul/div");
4995 generate_exception_end(ctx
, EXCP_RI
);
5004 * These MULT[U] and MADD[U] instructions, implemented in for example
5005 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5006 * architectures, are special three-operand variants with the syntax
5008 * MULT[U][1] rd, rs, rt
5012 * (rd, LO, HI) <- rs * rt
5016 * MADD[U][1] rd, rs, rt
5020 * (rd, LO, HI) <- (LO, HI) + rs * rt
5022 * where the low-order 32 bits of the result are placed into both the
5023 * GPR rd and the special register LO. The high-order 32 bits of the
5024 * result are placed into the special register HI.
5026 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5027 * which is the zero register that always reads as 0.
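 * A worked example with illustrative values: for MULT1 rd, rs, rt with
 * rs = 0x00010000 and rt = 0x00010000, the 64-bit product is
 * 0x0000000100000000, so rd = LO = 0x00000000 and HI = 0x00000001.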
5029 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5030 int rd
, int rs
, int rt
)
5032 TCGv t0
= tcg_temp_new();
5033 TCGv t1
= tcg_temp_new();
5036 gen_load_gpr(t0
, rs
);
5037 gen_load_gpr(t1
, rt
);
5045 TCGv_i32 t2
= tcg_temp_new_i32();
5046 TCGv_i32 t3
= tcg_temp_new_i32();
5047 tcg_gen_trunc_tl_i32(t2
, t0
);
5048 tcg_gen_trunc_tl_i32(t3
, t1
);
5049 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5051 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5053 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5054 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5055 tcg_temp_free_i32(t2
);
5056 tcg_temp_free_i32(t3
);
5059 case MMI_OPC_MULTU1
:
5064 TCGv_i32 t2
= tcg_temp_new_i32();
5065 TCGv_i32 t3
= tcg_temp_new_i32();
5066 tcg_gen_trunc_tl_i32(t2
, t0
);
5067 tcg_gen_trunc_tl_i32(t3
, t1
);
5068 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5070 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5072 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5073 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5074 tcg_temp_free_i32(t2
);
5075 tcg_temp_free_i32(t3
);
5083 TCGv_i64 t2
= tcg_temp_new_i64();
5084 TCGv_i64 t3
= tcg_temp_new_i64();
5086 tcg_gen_ext_tl_i64(t2
, t0
);
5087 tcg_gen_ext_tl_i64(t3
, t1
);
5088 tcg_gen_mul_i64(t2
, t2
, t3
);
5089 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5090 tcg_gen_add_i64(t2
, t2
, t3
);
5091 tcg_temp_free_i64(t3
);
5092 gen_move_low32(cpu_LO
[acc
], t2
);
5093 gen_move_high32(cpu_HI
[acc
], t2
);
5095 gen_move_low32(cpu_gpr
[rd
], t2
);
5097 tcg_temp_free_i64(t2
);
5100 case MMI_OPC_MADDU1
:
5105 TCGv_i64 t2
= tcg_temp_new_i64();
5106 TCGv_i64 t3
= tcg_temp_new_i64();
5108 tcg_gen_ext32u_tl(t0
, t0
);
5109 tcg_gen_ext32u_tl(t1
, t1
);
5110 tcg_gen_extu_tl_i64(t2
, t0
);
5111 tcg_gen_extu_tl_i64(t3
, t1
);
5112 tcg_gen_mul_i64(t2
, t2
, t3
);
5113 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5114 tcg_gen_add_i64(t2
, t2
, t3
);
5115 tcg_temp_free_i64(t3
);
5116 gen_move_low32(cpu_LO
[acc
], t2
);
5117 gen_move_high32(cpu_HI
[acc
], t2
);
5119 gen_move_low32(cpu_gpr
[rd
], t2
);
5121 tcg_temp_free_i64(t2
);
5125 MIPS_INVAL("mul/madd TXx9");
5126 generate_exception_end(ctx
, EXCP_RI
);
5135 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
5136 int rd
, int rs
, int rt
)
5138 TCGv t0
= tcg_temp_new();
5139 TCGv t1
= tcg_temp_new();
5141 gen_load_gpr(t0
, rs
);
5142 gen_load_gpr(t1
, rt
);
5145 case OPC_VR54XX_MULS
:
5146 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5148 case OPC_VR54XX_MULSU
:
5149 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5151 case OPC_VR54XX_MACC
:
5152 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5154 case OPC_VR54XX_MACCU
:
5155 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5157 case OPC_VR54XX_MSAC
:
5158 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5160 case OPC_VR54XX_MSACU
:
5161 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5163 case OPC_VR54XX_MULHI
:
5164 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5166 case OPC_VR54XX_MULHIU
:
5167 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5169 case OPC_VR54XX_MULSHI
:
5170 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5172 case OPC_VR54XX_MULSHIU
:
5173 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5175 case OPC_VR54XX_MACCHI
:
5176 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5178 case OPC_VR54XX_MACCHIU
:
5179 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5181 case OPC_VR54XX_MSACHI
:
5182 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5184 case OPC_VR54XX_MSACHIU
:
5185 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5188 MIPS_INVAL("mul vr54xx");
5189 generate_exception_end(ctx
, EXCP_RI
);
5192 gen_store_gpr(t0
, rd
);
5199 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
5209 gen_load_gpr(t0
, rs
);
5214 #if defined(TARGET_MIPS64)
5218 tcg_gen_not_tl(t0
, t0
);
5227 tcg_gen_ext32u_tl(t0
, t0
);
5228 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5229 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5231 #if defined(TARGET_MIPS64)
5236 tcg_gen_clzi_i64(t0
, t0
, 64);
5242 /* Godson integer instructions */
5243 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5244 int rd
, int rs
, int rt
)
5256 case OPC_MULTU_G_2E
:
5257 case OPC_MULTU_G_2F
:
5258 #if defined(TARGET_MIPS64)
5259 case OPC_DMULT_G_2E
:
5260 case OPC_DMULT_G_2F
:
5261 case OPC_DMULTU_G_2E
:
5262 case OPC_DMULTU_G_2F
:
5264 t0
= tcg_temp_new();
5265 t1
= tcg_temp_new();
5268 t0
= tcg_temp_local_new();
5269 t1
= tcg_temp_local_new();
5273 gen_load_gpr(t0
, rs
);
5274 gen_load_gpr(t1
, rt
);
5279 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5280 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5282 case OPC_MULTU_G_2E
:
5283 case OPC_MULTU_G_2F
:
5284 tcg_gen_ext32u_tl(t0
, t0
);
5285 tcg_gen_ext32u_tl(t1
, t1
);
5286 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5287 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5292 TCGLabel
*l1
= gen_new_label();
5293 TCGLabel
*l2
= gen_new_label();
5294 TCGLabel
*l3
= gen_new_label();
5295 tcg_gen_ext32s_tl(t0
, t0
);
5296 tcg_gen_ext32s_tl(t1
, t1
);
5297 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5298 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5301 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5302 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5303 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5306 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5307 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5314 TCGLabel
*l1
= gen_new_label();
5315 TCGLabel
*l2
= gen_new_label();
5316 tcg_gen_ext32u_tl(t0
, t0
);
5317 tcg_gen_ext32u_tl(t1
, t1
);
5318 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5319 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5322 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5323 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5330 TCGLabel
*l1
= gen_new_label();
5331 TCGLabel
*l2
= gen_new_label();
5332 TCGLabel
*l3
= gen_new_label();
5333 tcg_gen_ext32u_tl(t0
, t0
);
5334 tcg_gen_ext32u_tl(t1
, t1
);
5335 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5336 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5337 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5339 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5342 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5343 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5350 TCGLabel
*l1
= gen_new_label();
5351 TCGLabel
*l2
= gen_new_label();
5352 tcg_gen_ext32u_tl(t0
, t0
);
5353 tcg_gen_ext32u_tl(t1
, t1
);
5354 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5355 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5358 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5359 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5363 #if defined(TARGET_MIPS64)
5364 case OPC_DMULT_G_2E
:
5365 case OPC_DMULT_G_2F
:
5366 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5368 case OPC_DMULTU_G_2E
:
5369 case OPC_DMULTU_G_2F
:
5370 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5375 TCGLabel
*l1
= gen_new_label();
5376 TCGLabel
*l2
= gen_new_label();
5377 TCGLabel
*l3
= gen_new_label();
5378 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5379 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5382 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5383 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5384 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5387 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5391 case OPC_DDIVU_G_2E
:
5392 case OPC_DDIVU_G_2F
:
5394 TCGLabel
*l1
= gen_new_label();
5395 TCGLabel
*l2
= gen_new_label();
5396 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5397 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5400 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5407 TCGLabel
*l1
= gen_new_label();
5408 TCGLabel
*l2
= gen_new_label();
5409 TCGLabel
*l3
= gen_new_label();
5410 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5411 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5412 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5414 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5417 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5421 case OPC_DMODU_G_2E
:
5422 case OPC_DMODU_G_2F
:
5424 TCGLabel
*l1
= gen_new_label();
5425 TCGLabel
*l2
= gen_new_label();
5426 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5427 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5430 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5441 /* Loongson multimedia instructions */
5442 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5444 uint32_t opc
, shift_max
;
5447 opc
= MASK_LMI(ctx
->opcode
);
5453 t0
= tcg_temp_local_new_i64();
5454 t1
= tcg_temp_local_new_i64();
5457 t0
= tcg_temp_new_i64();
5458 t1
= tcg_temp_new_i64();
5462 check_cp1_enabled(ctx
);
5463 gen_load_fpr64(ctx
, t0
, rs
);
5464 gen_load_fpr64(ctx
, t1
, rt
);
5466 #define LMI_HELPER(UP, LO) \
5467 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5468 #define LMI_HELPER_1(UP, LO) \
5469 case OPC_##UP: gen_helper_##LO(t0, t0); break
5470 #define LMI_DIRECT(UP, LO, OP) \
5471 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5474 LMI_HELPER(PADDSH
, paddsh
);
5475 LMI_HELPER(PADDUSH
, paddush
);
5476 LMI_HELPER(PADDH
, paddh
);
5477 LMI_HELPER(PADDW
, paddw
);
5478 LMI_HELPER(PADDSB
, paddsb
);
5479 LMI_HELPER(PADDUSB
, paddusb
);
5480 LMI_HELPER(PADDB
, paddb
);
5482 LMI_HELPER(PSUBSH
, psubsh
);
5483 LMI_HELPER(PSUBUSH
, psubush
);
5484 LMI_HELPER(PSUBH
, psubh
);
5485 LMI_HELPER(PSUBW
, psubw
);
5486 LMI_HELPER(PSUBSB
, psubsb
);
5487 LMI_HELPER(PSUBUSB
, psubusb
);
5488 LMI_HELPER(PSUBB
, psubb
);
5490 LMI_HELPER(PSHUFH
, pshufh
);
5491 LMI_HELPER(PACKSSWH
, packsswh
);
5492 LMI_HELPER(PACKSSHB
, packsshb
);
5493 LMI_HELPER(PACKUSHB
, packushb
);
5495 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5496 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5497 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5498 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5499 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5500 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5502 LMI_HELPER(PAVGH
, pavgh
);
5503 LMI_HELPER(PAVGB
, pavgb
);
5504 LMI_HELPER(PMAXSH
, pmaxsh
);
5505 LMI_HELPER(PMINSH
, pminsh
);
5506 LMI_HELPER(PMAXUB
, pmaxub
);
5507 LMI_HELPER(PMINUB
, pminub
);
5509 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5510 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5511 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5512 LMI_HELPER(PCMPGTH
, pcmpgth
);
5513 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5514 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5516 LMI_HELPER(PSLLW
, psllw
);
5517 LMI_HELPER(PSLLH
, psllh
);
5518 LMI_HELPER(PSRLW
, psrlw
);
5519 LMI_HELPER(PSRLH
, psrlh
);
5520 LMI_HELPER(PSRAW
, psraw
);
5521 LMI_HELPER(PSRAH
, psrah
);
5523 LMI_HELPER(PMULLH
, pmullh
);
5524 LMI_HELPER(PMULHH
, pmulhh
);
5525 LMI_HELPER(PMULHUH
, pmulhuh
);
5526 LMI_HELPER(PMADDHW
, pmaddhw
);
5528 LMI_HELPER(PASUBUB
, pasubub
);
5529 LMI_HELPER_1(BIADD
, biadd
);
5530 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5532 LMI_DIRECT(PADDD
, paddd
, add
);
5533 LMI_DIRECT(PSUBD
, psubd
, sub
);
5534 LMI_DIRECT(XOR_CP2
, xor, xor);
5535 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5536 LMI_DIRECT(AND_CP2
, and, and);
5537 LMI_DIRECT(OR_CP2
, or, or);
5540 tcg_gen_andc_i64(t0
, t1
, t0
);
5544 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5547 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5550 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5553 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5557 tcg_gen_andi_i64(t1
, t1
, 3);
5558 tcg_gen_shli_i64(t1
, t1
, 4);
5559 tcg_gen_shr_i64(t0
, t0
, t1
);
5560 tcg_gen_ext16u_i64(t0
, t0
);
5564 tcg_gen_add_i64(t0
, t0
, t1
);
5565 tcg_gen_ext32s_i64(t0
, t0
);
5568 tcg_gen_sub_i64(t0
, t0
, t1
);
5569 tcg_gen_ext32s_i64(t0
, t0
);
5591 /* Make sure shift count isn't TCG undefined behaviour. */
5592 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5597 tcg_gen_shl_i64(t0
, t0
, t1
);
5601 /* Since SRA is UndefinedResult without sign-extended inputs,
5602 we can treat SRA and DSRA the same. */
5603 tcg_gen_sar_i64(t0
, t0
, t1
);
5606 /* We want to shift in zeros for SRL; zero-extend first. */
5607 tcg_gen_ext32u_i64(t0
, t0
);
5610 tcg_gen_shr_i64(t0
, t0
, t1
);
5614 if (shift_max
== 32) {
5615 tcg_gen_ext32s_i64(t0
, t0
);
5618 /* Shifts larger than MAX produce zero. */
5619 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5620 tcg_gen_neg_i64(t1
, t1
);
5621 tcg_gen_and_i64(t0
, t0
, t1
);
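/* setcond yields 1 for an in-range count and 0 otherwise; negating that
   gives an all-ones or all-zero mask, so counts of shift_max or more
   force the result to zero, matching the "shifts larger than MAX
   produce zero" rule noted above. */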
5627 TCGv_i64 t2
= tcg_temp_new_i64();
5628 TCGLabel
*lab
= gen_new_label();
5630 tcg_gen_mov_i64(t2
, t0
);
5631 tcg_gen_add_i64(t0
, t1
, t2
);
5632 if (opc
== OPC_ADD_CP2
) {
5633 tcg_gen_ext32s_i64(t0
, t0
);
5635 tcg_gen_xor_i64(t1
, t1
, t2
);
5636 tcg_gen_xor_i64(t2
, t2
, t0
);
5637 tcg_gen_andc_i64(t1
, t2
, t1
);
5638 tcg_temp_free_i64(t2
);
5639 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5640 generate_exception(ctx
, EXCP_OVERFLOW
);
5648 TCGv_i64 t2
= tcg_temp_new_i64();
5649 TCGLabel
*lab
= gen_new_label();
5651 tcg_gen_mov_i64(t2
, t0
);
5652 tcg_gen_sub_i64(t0
, t1
, t2
);
5653 if (opc
== OPC_SUB_CP2
) {
5654 tcg_gen_ext32s_i64(t0
, t0
);
5656 tcg_gen_xor_i64(t1
, t1
, t2
);
5657 tcg_gen_xor_i64(t2
, t2
, t0
);
5658 tcg_gen_and_i64(t1
, t1
, t2
);
5659 tcg_temp_free_i64(t2
);
5660 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5661 generate_exception(ctx
, EXCP_OVERFLOW
);
5667 tcg_gen_ext32u_i64(t0
, t0
);
5668 tcg_gen_ext32u_i64(t1
, t1
);
5669 tcg_gen_mul_i64(t0
, t0
, t1
);
5678 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5679 FD field is the CC field? */
5681 MIPS_INVAL("loongson_cp2");
5682 generate_exception_end(ctx
, EXCP_RI
);
5689 gen_store_fpr64(ctx
, t0
, rd
);
5691 tcg_temp_free_i64(t0
);
5692 tcg_temp_free_i64(t1
);
5696 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5697 int rs
, int rt
, int16_t imm
)
5700 TCGv t0
= tcg_temp_new();
5701 TCGv t1
= tcg_temp_new();
5704 /* Load needed operands */
5712 /* Compare two registers */
5714 gen_load_gpr(t0
, rs
);
5715 gen_load_gpr(t1
, rt
);
5725 /* Compare register to immediate */
5726 if (rs
!= 0 || imm
!= 0) {
5727 gen_load_gpr(t0
, rs
);
5728 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5735 case OPC_TEQ
: /* rs == rs */
5736 case OPC_TEQI
: /* r0 == 0 */
5737 case OPC_TGE
: /* rs >= rs */
5738 case OPC_TGEI
: /* r0 >= 0 */
5739 case OPC_TGEU
: /* rs >= rs unsigned */
5740 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5742 generate_exception_end(ctx
, EXCP_TRAP
);
5744 case OPC_TLT
: /* rs < rs */
5745 case OPC_TLTI
: /* r0 < 0 */
5746 case OPC_TLTU
: /* rs < rs unsigned */
5747 case OPC_TLTIU
: /* r0 < 0 unsigned */
5748 case OPC_TNE
: /* rs != rs */
5749 case OPC_TNEI
: /* r0 != 0 */
5750 /* Never trap: treat as NOP. */
5754 TCGLabel
*l1
= gen_new_label();
5759 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5763 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5767 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5771 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5775 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5779 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5782 generate_exception(ctx
, EXCP_TRAP
);
5789 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5791 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5795 #ifndef CONFIG_USER_ONLY
5796 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
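/* Direct block chaining is only used when the destination lies in the
   same guest page as the current TB (and never while single-stepping),
   so page-granular TB invalidation is enough to break stale direct jumps. */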
5802 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5804 if (use_goto_tb(ctx
, dest
)) {
5807 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5810 if (ctx
->base
.singlestep_enabled
) {
5811 save_cpu_state(ctx
, 0);
5812 gen_helper_raise_exception_debug(cpu_env
);
5814 tcg_gen_lookup_and_goto_ptr();
5818 /* Branches (before delay slot) */
5819 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5821 int rs
, int rt
, int32_t offset
,
5824 target_ulong btgt
= -1;
5826 int bcond_compute
= 0;
5827 TCGv t0
= tcg_temp_new();
5828 TCGv t1
= tcg_temp_new();
5830 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5831 #ifdef MIPS_DEBUG_DISAS
5832 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5833 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5835 generate_exception_end(ctx
, EXCP_RI
);
5839 /* Load needed operands */
5845 /* Compare two registers */
5847 gen_load_gpr(t0
, rs
);
5848 gen_load_gpr(t1
, rt
);
5851 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5865 /* Compare to zero */
5867 gen_load_gpr(t0
, rs
);
5870 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5873 #if defined(TARGET_MIPS64)
5875 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5877 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5880 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5885 /* Jump to immediate */
5886 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5891 /* Jump to register */
5892 if (offset
!= 0 && offset
!= 16) {
5893 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5894 others are reserved. */
5895 MIPS_INVAL("jump hint");
5896 generate_exception_end(ctx
, EXCP_RI
);
5899 gen_load_gpr(btarget
, rs
);
5902 MIPS_INVAL("branch/jump");
5903 generate_exception_end(ctx
, EXCP_RI
);
5906 if (bcond_compute
== 0) {
5907 /* No condition to be computed */
5909 case OPC_BEQ
: /* rx == rx */
5910 case OPC_BEQL
: /* rx == rx likely */
5911 case OPC_BGEZ
: /* 0 >= 0 */
5912 case OPC_BGEZL
: /* 0 >= 0 likely */
5913 case OPC_BLEZ
: /* 0 <= 0 */
5914 case OPC_BLEZL
: /* 0 <= 0 likely */
5916 ctx
->hflags
|= MIPS_HFLAG_B
;
5918 case OPC_BGEZAL
: /* 0 >= 0 */
5919 case OPC_BGEZALL
: /* 0 >= 0 likely */
5920 /* Always take and link */
5922 ctx
->hflags
|= MIPS_HFLAG_B
;
5924 case OPC_BNE
: /* rx != rx */
5925 case OPC_BGTZ
: /* 0 > 0 */
5926 case OPC_BLTZ
: /* 0 < 0 */
5929 case OPC_BLTZAL
: /* 0 < 0 */
5930 /* Handle as an unconditional branch to get correct delay
5933 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5934 ctx
->hflags
|= MIPS_HFLAG_B
;
5936 case OPC_BLTZALL
: /* 0 < 0 likely */
5937 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5938 /* Skip the instruction in the delay slot */
5939 ctx
->base
.pc_next
+= 4;
5941 case OPC_BNEL
: /* rx != rx likely */
5942 case OPC_BGTZL
: /* 0 > 0 likely */
5943 case OPC_BLTZL
: /* 0 < 0 likely */
5944 /* Skip the instruction in the delay slot */
5945 ctx
->base
.pc_next
+= 4;
5948 ctx
->hflags
|= MIPS_HFLAG_B
;
5951 ctx
->hflags
|= MIPS_HFLAG_BX
;
5955 ctx
->hflags
|= MIPS_HFLAG_B
;
5958 ctx
->hflags
|= MIPS_HFLAG_BR
;
5962 ctx
->hflags
|= MIPS_HFLAG_BR
;
5965 MIPS_INVAL("branch/jump");
5966 generate_exception_end(ctx
, EXCP_RI
);
5972 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5975 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5978 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5981 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5984 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5987 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5990 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5994 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5998 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6001 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6004 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6007 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6010 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6013 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6016 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6018 #if defined(TARGET_MIPS64)
6020 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6024 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6027 ctx
->hflags
|= MIPS_HFLAG_BC
;
6030 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6033 ctx
->hflags
|= MIPS_HFLAG_BL
;
6036 MIPS_INVAL("conditional branch/jump");
6037 generate_exception_end(ctx
, EXCP_RI
);
6042 ctx
->btarget
= btgt
;
6044 switch (delayslot_size
) {
6046 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6049 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6054 int post_delay
= insn_bytes
+ delayslot_size
;
6055 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6057 tcg_gen_movi_tl(cpu_gpr
[blink
],
6058 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6062 if (insn_bytes
== 2)
6063 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* nanoMIPS Branches */
static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
                                  int insn_bytes,
                                  int rs, int rt, int32_t offset)
{
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Compare to zero */
        gen_load_gpr(t0, rs);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->base.pc_next + insn_bytes + offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:    /* rx == rx        */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL: /* 0 >= 0          */
            /* Always take and link */
            tcg_gen_movi_tl(cpu_gpr[31],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:    /* rx != rx        */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
            /* Skip the instruction in the delay slot */
            ctx->base.pc_next += 4;
            ctx->hflags |= MIPS_HFLAG_BR;
            tcg_gen_movi_tl(cpu_gpr[rt],
                            ctx->base.pc_next + insn_bytes);
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_movi_tl(cpu_gpr[31],
                        ctx->base.pc_next + insn_bytes);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("conditional branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;
    if (insn_bytes == 2) {
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension. */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x00FF00FF);

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_and_tl(t0, t0, t2);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bshfl");
        generate_exception_end(ctx, EXCP_RI);
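/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): the shift/and/or sequence in gen_bshfl() for the 32-bit case is the
 * usual WSBH pattern, i.e. swap the two bytes inside each halfword. A plain-C
 * model with a hypothetical name:
 */
#if 0
#include <stdint.h>

static uint32_t example_wsbh(uint32_t x)
{
    return ((x >> 8) & 0x00FF00FFu) | ((x & 0x00FF00FFu) << 8);
}
#endif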
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
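/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): gen_lsa() is a shift-and-add, rd = (rs << (imm2 + 1)) + rt, and the
 * 32-bit LSA variant sign-extends the result while the 64-bit form does not.
 * Plain-C model, assuming imm2 stays in the 0..3 range the instruction
 * encodes; the example_* names are hypothetical.
 */
#if 0
#include <stdint.h>

static int32_t example_lsa(uint32_t rs, uint32_t rt, unsigned imm2)
{
    return (int32_t)((rs << (imm2 + 1)) + rt);
}

static uint64_t example_dlsa(uint64_t rs, uint64_t rt, unsigned imm2)
{
    return (rs << (imm2 + 1)) + rt;
}
#endif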
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        gen_load_gpr(t0, rt);
        gen_load_gpr(t0, rs);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 32 - bits);
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}

static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
}
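/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): gen_align_bits() concatenates the two source registers into a
 * double-width value (rt in the high half) and shifts right by
 * (wordsz - bits), so the 32-bit ALIGN with byte position bp yields
 * (rt << 8*bp) | (rs >> (32 - 8*bp)), with bp == 0 simply selecting rt.
 * Plain-C model of that 32-bit case; the example_* name is hypothetical.
 */
#if 0
#include <stdint.h>

static uint32_t example_align32(uint32_t rs, uint32_t rt, unsigned bp)
{
    unsigned bits = 8 * bp;

    if (bits == 0) {
        return rt;
    }
    return (rt << bits) | (rs >> (32 - bits));
}
#endif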
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
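/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): the bitswap/dbitswap helpers invoked above reverse the bit order
 * inside each byte while keeping the bytes in place; the helpers themselves
 * live outside this file. A plain-C model of the 32-bit operation with a
 * hypothetical name:
 */
#if 0
#include <stdint.h>

static uint32_t example_bitswap32(uint32_t x)
{
    x = ((x & 0x55555555u) << 1) | ((x >> 1) & 0x55555555u);
    x = ((x & 0x33333333u) << 2) | ((x >> 2) & 0x33333333u);
    x = ((x & 0x0F0F0F0Fu) << 4) | ((x >> 4) & 0x0F0F0F0Fu);
    return x;
}
#endif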
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
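/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): gen_mthc0_store64() keeps the low 32 bits of the 64-bit CP0
 * register and replaces the high 32 bits with the guest value; the EntryLo
 * variant instead deposits the value at bit 30 on 64-bit targets, where the
 * RI/XI bits sit higher up. Plain-C model of the plain store64 case with a
 * hypothetical name:
 */
#if 0
#include <stdint.h>

static uint64_t example_mthc0_store64(uint64_t old_reg, uint32_t new_hi)
{
    return (old_reg & 0xFFFFFFFFull) | ((uint64_t)new_hi << 32);
}
#endif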
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
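/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): the two access-size helpers above encode the usual convention that
 * a 32-bit CP0 value read by MFC0 is sign-extended into the wider register,
 * while MTC0 truncates the register to 32 bits before storing. Plain-C model
 * with hypothetical names:
 */
#if 0
#include <stdint.h>

static int64_t example_mfc0_load32(int32_t cp0_value)
{
    return (int64_t)cp0_value;   /* sign-extend into the register */
}

static int32_t example_mtc0_store32(int64_t gpr_value)
{
    return (int32_t)gpr_value;   /* keep only the low 32 bits */
}
#endif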
#define CP0_CHECK(c)                          \
    do {                                      \
        if (!(c)) { goto cp0_unimplemented; } \
    } while (0)
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    case CP0_REGISTER_02:
        CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
        gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
        register_name = "EntryLo0";
        goto cp0_unimplemented;
    case CP0_REGISTER_03:
        CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
        gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
        register_name = "EntryLo1";
        goto cp0_unimplemented;
    case CP0_REGISTER_09:
        CP0_CHECK(ctx->saar);
        gen_helper_mfhc0_saar(arg, cpu_env);
        register_name = "SAAR";
        goto cp0_unimplemented;
    case CP0_REGISTER_17:
        gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_LLAddr),
                         ctx->CP0_LLAddr_shift);
        register_name = "LLAddr";
        CP0_CHECK(ctx->mrp);
        gen_helper_mfhc0_maar(arg, cpu_env);
        register_name = "MAAR";
        goto cp0_unimplemented;
    case CP0_REGISTER_28:
        gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
        register_name = "TagLo";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfhc0", register_name, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    case CP0_REGISTER_02:
        CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
        register_name = "EntryLo0";
        goto cp0_unimplemented;
    case CP0_REGISTER_03:
        CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
        register_name = "EntryLo1";
        goto cp0_unimplemented;
    case CP0_REGISTER_09:
        CP0_CHECK(ctx->saar);
        gen_helper_mthc0_saar(cpu_env, arg);
        register_name = "SAAR";
        goto cp0_unimplemented;
    case CP0_REGISTER_17:
        /* LLAddr is read-only (the only exception is bit 0 if LLB is
           supported); the CP0_LLAddr_rw_bitmask does not seem to be
           relevant for modern MIPS cores supporting MTHC0, therefore
           treating MTHC0 to LLAddr as NOP. */
        register_name = "LLAddr";
        CP0_CHECK(ctx->mrp);
        gen_helper_mthc0_maar(cpu_env, arg);
        register_name = "MAAR";
        goto cp0_unimplemented;
    case CP0_REGISTER_28:
        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
        register_name = "TagLo";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mthc0", register_name, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    check_insn(ctx, ISA_MIPS32);
6714 case CP0_REGISTER_00
:
6717 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6718 register_name
= "Index";
6721 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6722 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6723 register_name
= "MVPControl";
6726 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6727 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6728 register_name
= "MVPConf0";
6731 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6732 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6733 register_name
= "MVPConf1";
6737 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6738 register_name
= "VPControl";
6741 goto cp0_unimplemented
;
6744 case CP0_REGISTER_01
:
6747 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6748 gen_helper_mfc0_random(arg
, cpu_env
);
6749 register_name
= "Random";
6752 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6754 register_name
= "VPEControl";
6757 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6758 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6759 register_name
= "VPEConf0";
6762 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6763 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6764 register_name
= "VPEConf1";
6767 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6768 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6769 register_name
= "YQMask";
6772 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6773 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6774 register_name
= "VPESchedule";
6777 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6778 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6779 register_name
= "VPEScheFBack";
6782 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6783 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6784 register_name
= "VPEOpt";
6787 goto cp0_unimplemented
;
6790 case CP0_REGISTER_02
:
6794 TCGv_i64 tmp
= tcg_temp_new_i64();
6795 tcg_gen_ld_i64(tmp
, cpu_env
,
6796 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6797 #if defined(TARGET_MIPS64)
6799 /* Move RI/XI fields to bits 31:30 */
6800 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6801 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6804 gen_move_low32(arg
, tmp
);
6805 tcg_temp_free_i64(tmp
);
6807 register_name
= "EntryLo0";
6810 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6811 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6812 register_name
= "TCStatus";
6815 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6816 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6817 register_name
= "TCBind";
6820 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6821 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6822 register_name
= "TCRestart";
6825 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6826 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6827 register_name
= "TCHalt";
6830 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6831 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6832 register_name
= "TCContext";
6835 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6836 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6837 register_name
= "TCSchedule";
6840 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6841 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6842 register_name
= "TCScheFBack";
6845 goto cp0_unimplemented
;
6848 case CP0_REGISTER_03
:
6852 TCGv_i64 tmp
= tcg_temp_new_i64();
6853 tcg_gen_ld_i64(tmp
, cpu_env
,
6854 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6855 #if defined(TARGET_MIPS64)
6857 /* Move RI/XI fields to bits 31:30 */
6858 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6859 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6862 gen_move_low32(arg
, tmp
);
6863 tcg_temp_free_i64(tmp
);
6865 register_name
= "EntryLo1";
6869 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6870 register_name
= "GlobalNumber";
6873 goto cp0_unimplemented
;
6876 case CP0_REGISTER_04
:
6879 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6880 tcg_gen_ext32s_tl(arg
, arg
);
6881 register_name
= "Context";
6884 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6885 register_name
= "ContextConfig";
6886 goto cp0_unimplemented
;
6888 CP0_CHECK(ctx
->ulri
);
6889 tcg_gen_ld_tl(arg
, cpu_env
,
6890 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6891 tcg_gen_ext32s_tl(arg
, arg
);
6892 register_name
= "UserLocal";
6895 goto cp0_unimplemented
;
6898 case CP0_REGISTER_05
:
6901 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6902 register_name
= "PageMask";
6905 check_insn(ctx
, ISA_MIPS32R2
);
6906 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6907 register_name
= "PageGrain";
6911 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6912 tcg_gen_ext32s_tl(arg
, arg
);
6913 register_name
= "SegCtl0";
6917 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6918 tcg_gen_ext32s_tl(arg
, arg
);
6919 register_name
= "SegCtl1";
6923 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6924 tcg_gen_ext32s_tl(arg
, arg
);
6925 register_name
= "SegCtl2";
6929 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6930 register_name
= "PWBase";
6934 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6935 register_name
= "PWField";
6939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6940 register_name
= "PWSize";
6943 goto cp0_unimplemented
;
6946 case CP0_REGISTER_06
:
6949 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6950 register_name
= "Wired";
6953 check_insn(ctx
, ISA_MIPS32R2
);
6954 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6955 register_name
= "SRSConf0";
6958 check_insn(ctx
, ISA_MIPS32R2
);
6959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6960 register_name
= "SRSConf1";
6963 check_insn(ctx
, ISA_MIPS32R2
);
6964 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6965 register_name
= "SRSConf2";
6968 check_insn(ctx
, ISA_MIPS32R2
);
6969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6970 register_name
= "SRSConf3";
6973 check_insn(ctx
, ISA_MIPS32R2
);
6974 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6975 register_name
= "SRSConf4";
6979 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6980 register_name
= "PWCtl";
6983 goto cp0_unimplemented
;
6986 case CP0_REGISTER_07
:
6989 check_insn(ctx
, ISA_MIPS32R2
);
6990 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6991 register_name
= "HWREna";
6994 goto cp0_unimplemented
;
6997 case CP0_REGISTER_08
:
7000 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7001 tcg_gen_ext32s_tl(arg
, arg
);
7002 register_name
= "BadVAddr";
7006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7007 register_name
= "BadInstr";
7011 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7012 register_name
= "BadInstrP";
7016 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7017 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7018 register_name
= "BadInstrX";
7021 goto cp0_unimplemented
;
    case CP0_REGISTER_09:
        /* Mark as an IO operation because we read the time. */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_helper_mfc0_count(arg, cpu_env);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        /* Break the TB to be able to take timer interrupts immediately
           after reading count. DISAS_STOP isn't sufficient, we need to
           ensure we break completely out of translated code. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        register_name = "Count";
        CP0_CHECK(ctx->saar);
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SAARI));
        register_name = "SAARI";
        CP0_CHECK(ctx->saar);
        gen_helper_mfc0_saar(arg, cpu_env);
        register_name = "SAAR";
        goto cp0_unimplemented;
7056 case CP0_REGISTER_10
:
7059 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7060 tcg_gen_ext32s_tl(arg
, arg
);
7061 register_name
= "EntryHi";
7064 goto cp0_unimplemented
;
7067 case CP0_REGISTER_11
:
7070 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7071 register_name
= "Compare";
7073 /* 6,7 are implementation dependent */
7075 goto cp0_unimplemented
;
7078 case CP0_REGISTER_12
:
7081 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7082 register_name
= "Status";
7085 check_insn(ctx
, ISA_MIPS32R2
);
7086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7087 register_name
= "IntCtl";
7090 check_insn(ctx
, ISA_MIPS32R2
);
7091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7092 register_name
= "SRSCtl";
7095 check_insn(ctx
, ISA_MIPS32R2
);
7096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7097 register_name
= "SRSMap";
7100 goto cp0_unimplemented
;
7103 case CP0_REGISTER_13
:
7106 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7107 register_name
= "Cause";
7110 goto cp0_unimplemented
;
7113 case CP0_REGISTER_14
:
7116 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7117 tcg_gen_ext32s_tl(arg
, arg
);
7118 register_name
= "EPC";
7121 goto cp0_unimplemented
;
7124 case CP0_REGISTER_15
:
7127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7128 register_name
= "PRid";
7131 check_insn(ctx
, ISA_MIPS32R2
);
7132 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7133 tcg_gen_ext32s_tl(arg
, arg
);
7134 register_name
= "EBase";
7137 check_insn(ctx
, ISA_MIPS32R2
);
7138 CP0_CHECK(ctx
->cmgcr
);
7139 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7140 tcg_gen_ext32s_tl(arg
, arg
);
7141 register_name
= "CMGCRBase";
7144 goto cp0_unimplemented
;
7147 case CP0_REGISTER_16
:
7150 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7151 register_name
= "Config";
7154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7155 register_name
= "Config1";
7158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7159 register_name
= "Config2";
7162 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7163 register_name
= "Config3";
7166 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7167 register_name
= "Config4";
7170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7171 register_name
= "Config5";
7173 /* 6,7 are implementation dependent */
7175 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7176 register_name
= "Config6";
7179 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7180 register_name
= "Config7";
7183 goto cp0_unimplemented
;
7186 case CP0_REGISTER_17
:
7189 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7190 register_name
= "LLAddr";
7193 CP0_CHECK(ctx
->mrp
);
7194 gen_helper_mfc0_maar(arg
, cpu_env
);
7195 register_name
= "MAAR";
7198 CP0_CHECK(ctx
->mrp
);
7199 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7200 register_name
= "MAARI";
7203 goto cp0_unimplemented
;
7206 case CP0_REGISTER_18
:
7216 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7217 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7218 register_name
= "WatchLo";
7221 goto cp0_unimplemented
;
7224 case CP0_REGISTER_19
:
7234 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7235 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7236 register_name
= "WatchHi";
7239 goto cp0_unimplemented
;
7242 case CP0_REGISTER_20
:
7245 #if defined(TARGET_MIPS64)
7246 check_insn(ctx
, ISA_MIPS3
);
7247 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7248 tcg_gen_ext32s_tl(arg
, arg
);
7249 register_name
= "XContext";
7253 goto cp0_unimplemented
;
7256 case CP0_REGISTER_21
:
7257 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7258 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7262 register_name
= "Framemask";
7265 goto cp0_unimplemented
;
    case CP0_REGISTER_22:
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        register_name = "Diagnostic"; /* implementation dependent */
7272 case CP0_REGISTER_23
:
7275 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7276 register_name
= "Debug";
7279 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7280 register_name
= "TraceControl";
7281 goto cp0_unimplemented
;
7283 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7284 register_name
= "TraceControl2";
7285 goto cp0_unimplemented
;
7287 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7288 register_name
= "UserTraceData";
7289 goto cp0_unimplemented
;
7291 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7292 register_name
= "TraceBPC";
7293 goto cp0_unimplemented
;
7295 goto cp0_unimplemented
;
7298 case CP0_REGISTER_24
:
7302 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7303 tcg_gen_ext32s_tl(arg
, arg
);
7304 register_name
= "DEPC";
7307 goto cp0_unimplemented
;
7310 case CP0_REGISTER_25
:
7313 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7314 register_name
= "Performance0";
7317 // gen_helper_mfc0_performance1(arg);
7318 register_name
= "Performance1";
7319 goto cp0_unimplemented
;
7321 // gen_helper_mfc0_performance2(arg);
7322 register_name
= "Performance2";
7323 goto cp0_unimplemented
;
7325 // gen_helper_mfc0_performance3(arg);
7326 register_name
= "Performance3";
7327 goto cp0_unimplemented
;
7329 // gen_helper_mfc0_performance4(arg);
7330 register_name
= "Performance4";
7331 goto cp0_unimplemented
;
7333 // gen_helper_mfc0_performance5(arg);
7334 register_name
= "Performance5";
7335 goto cp0_unimplemented
;
7337 // gen_helper_mfc0_performance6(arg);
7338 register_name
= "Performance6";
7339 goto cp0_unimplemented
;
7341 // gen_helper_mfc0_performance7(arg);
7342 register_name
= "Performance7";
7343 goto cp0_unimplemented
;
7345 goto cp0_unimplemented
;
7348 case CP0_REGISTER_26
:
7351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7352 register_name
= "ErrCtl";
7355 goto cp0_unimplemented
;
7358 case CP0_REGISTER_27
:
7364 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7365 register_name
= "CacheErr";
7368 goto cp0_unimplemented
;
7371 case CP0_REGISTER_28
:
7378 TCGv_i64 tmp
= tcg_temp_new_i64();
7379 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7380 gen_move_low32(arg
, tmp
);
7381 tcg_temp_free_i64(tmp
);
7383 register_name
= "TagLo";
7389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7390 register_name
= "DataLo";
7393 goto cp0_unimplemented
;
7396 case CP0_REGISTER_29
:
7402 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7403 register_name
= "TagHi";
7409 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7410 register_name
= "DataHi";
7413 goto cp0_unimplemented
;
7416 case CP0_REGISTER_30
:
7419 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7420 tcg_gen_ext32s_tl(arg
, arg
);
7421 register_name
= "ErrorEPC";
7424 goto cp0_unimplemented
;
7427 case CP0_REGISTER_31
:
7431 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7432 register_name
= "DESAVE";
        CP0_CHECK(ctx->kscrexist & (1 << sel));
        tcg_gen_ld_tl(arg, cpu_env,
                      offsetof(CPUMIPSState, CP0_KScratch[sel - 2]));
        tcg_gen_ext32s_tl(arg, arg);
        register_name = "KScratch";
        goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfc0", register_name, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    check_insn(ctx, ISA_MIPS32);

    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
7474 case CP0_REGISTER_00
:
7477 gen_helper_mtc0_index(cpu_env
, arg
);
7478 register_name
= "Index";
7481 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7482 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7483 register_name
= "MVPControl";
7486 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7488 register_name
= "MVPConf0";
7491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7493 register_name
= "MVPConf1";
7498 register_name
= "VPControl";
7501 goto cp0_unimplemented
;
7504 case CP0_REGISTER_01
:
7508 register_name
= "Random";
7511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7512 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7513 register_name
= "VPEControl";
7516 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7517 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7518 register_name
= "VPEConf0";
7521 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7522 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7523 register_name
= "VPEConf1";
7526 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7527 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7528 register_name
= "YQMask";
7531 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7532 tcg_gen_st_tl(arg
, cpu_env
,
7533 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7534 register_name
= "VPESchedule";
7537 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7538 tcg_gen_st_tl(arg
, cpu_env
,
7539 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7540 register_name
= "VPEScheFBack";
7543 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7544 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7545 register_name
= "VPEOpt";
7548 goto cp0_unimplemented
;
7551 case CP0_REGISTER_02
:
7554 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7555 register_name
= "EntryLo0";
7558 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7559 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7560 register_name
= "TCStatus";
7563 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7564 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7565 register_name
= "TCBind";
7568 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7569 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7570 register_name
= "TCRestart";
7573 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7574 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7575 register_name
= "TCHalt";
7578 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7579 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7580 register_name
= "TCContext";
7583 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7584 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7585 register_name
= "TCSchedule";
7588 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7589 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7590 register_name
= "TCScheFBack";
7593 goto cp0_unimplemented
;
7596 case CP0_REGISTER_03
:
7599 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7600 register_name
= "EntryLo1";
7605 register_name
= "GlobalNumber";
7608 goto cp0_unimplemented
;
7611 case CP0_REGISTER_04
:
7614 gen_helper_mtc0_context(cpu_env
, arg
);
7615 register_name
= "Context";
7618 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7619 register_name
= "ContextConfig";
7620 goto cp0_unimplemented
;
7622 CP0_CHECK(ctx
->ulri
);
7623 tcg_gen_st_tl(arg
, cpu_env
,
7624 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7625 register_name
= "UserLocal";
7628 goto cp0_unimplemented
;
7631 case CP0_REGISTER_05
:
7634 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7635 register_name
= "PageMask";
7638 check_insn(ctx
, ISA_MIPS32R2
);
7639 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7640 register_name
= "PageGrain";
7641 ctx
->base
.is_jmp
= DISAS_STOP
;
7645 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7646 register_name
= "SegCtl0";
7650 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7651 register_name
= "SegCtl1";
7655 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7656 register_name
= "SegCtl2";
7660 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7661 register_name
= "PWBase";
7665 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7666 register_name
= "PWField";
7670 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7671 register_name
= "PWSize";
7674 goto cp0_unimplemented
;
7677 case CP0_REGISTER_06
:
7680 gen_helper_mtc0_wired(cpu_env
, arg
);
7681 register_name
= "Wired";
7684 check_insn(ctx
, ISA_MIPS32R2
);
7685 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7686 register_name
= "SRSConf0";
7689 check_insn(ctx
, ISA_MIPS32R2
);
7690 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7691 register_name
= "SRSConf1";
7694 check_insn(ctx
, ISA_MIPS32R2
);
7695 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7696 register_name
= "SRSConf2";
7699 check_insn(ctx
, ISA_MIPS32R2
);
7700 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7701 register_name
= "SRSConf3";
7704 check_insn(ctx
, ISA_MIPS32R2
);
7705 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7706 register_name
= "SRSConf4";
7710 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7711 register_name
= "PWCtl";
7714 goto cp0_unimplemented
;
7717 case CP0_REGISTER_07
:
7720 check_insn(ctx
, ISA_MIPS32R2
);
7721 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7722 ctx
->base
.is_jmp
= DISAS_STOP
;
7723 register_name
= "HWREna";
7726 goto cp0_unimplemented
;
7729 case CP0_REGISTER_08
:
7733 register_name
= "BadVAddr";
7737 register_name
= "BadInstr";
7741 register_name
= "BadInstrP";
7745 register_name
= "BadInstrX";
7748 goto cp0_unimplemented
;
7751 case CP0_REGISTER_09
:
7754 gen_helper_mtc0_count(cpu_env
, arg
);
7755 register_name
= "Count";
7758 CP0_CHECK(ctx
->saar
);
7759 gen_helper_mtc0_saari(cpu_env
, arg
);
7760 register_name
= "SAARI";
7763 CP0_CHECK(ctx
->saar
);
7764 gen_helper_mtc0_saar(cpu_env
, arg
);
7765 register_name
= "SAAR";
7768 goto cp0_unimplemented
;
7771 case CP0_REGISTER_10
:
7774 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7775 register_name
= "EntryHi";
7778 goto cp0_unimplemented
;
7781 case CP0_REGISTER_11
:
7784 gen_helper_mtc0_compare(cpu_env
, arg
);
7785 register_name
= "Compare";
7787 /* 6,7 are implementation dependent */
7789 goto cp0_unimplemented
;
    case CP0_REGISTER_12:
        save_cpu_state(ctx, 1);
        gen_helper_mtc0_status(cpu_env, arg);
        /* DISAS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        register_name = "Status";
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_intctl(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        register_name = "IntCtl";
        check_insn(ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsctl(cpu_env, arg);
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        register_name = "SRSCtl";
        check_insn(ctx, ISA_MIPS32R2);
        gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
        /* Stop translation as we may have switched the execution mode */
        ctx->base.is_jmp = DISAS_STOP;
        register_name = "SRSMap";
        goto cp0_unimplemented;
    case CP0_REGISTER_13:
        save_cpu_state(ctx, 1);
        gen_helper_mtc0_cause(cpu_env, arg);
        /* Stop translation as we may have triggered an interrupt.
         * DISAS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
        register_name = "Cause";
        goto cp0_unimplemented;
7843 case CP0_REGISTER_14
:
7846 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7847 register_name
= "EPC";
7850 goto cp0_unimplemented
;
7853 case CP0_REGISTER_15
:
7857 register_name
= "PRid";
7860 check_insn(ctx
, ISA_MIPS32R2
);
7861 gen_helper_mtc0_ebase(cpu_env
, arg
);
7862 register_name
= "EBase";
7865 goto cp0_unimplemented
;
7868 case CP0_REGISTER_16
:
7871 gen_helper_mtc0_config0(cpu_env
, arg
);
7872 register_name
= "Config";
7873 /* Stop translation as we may have switched the execution mode */
7874 ctx
->base
.is_jmp
= DISAS_STOP
;
7877 /* ignored, read only */
7878 register_name
= "Config1";
7881 gen_helper_mtc0_config2(cpu_env
, arg
);
7882 register_name
= "Config2";
7883 /* Stop translation as we may have switched the execution mode */
7884 ctx
->base
.is_jmp
= DISAS_STOP
;
7887 gen_helper_mtc0_config3(cpu_env
, arg
);
7888 register_name
= "Config3";
7889 /* Stop translation as we may have switched the execution mode */
7890 ctx
->base
.is_jmp
= DISAS_STOP
;
7893 gen_helper_mtc0_config4(cpu_env
, arg
);
7894 register_name
= "Config4";
7895 ctx
->base
.is_jmp
= DISAS_STOP
;
7898 gen_helper_mtc0_config5(cpu_env
, arg
);
7899 register_name
= "Config5";
7900 /* Stop translation as we may have switched the execution mode */
7901 ctx
->base
.is_jmp
= DISAS_STOP
;
7903 /* 6,7 are implementation dependent */
7906 register_name
= "Config6";
7910 register_name
= "Config7";
7913 register_name
= "Invalid config selector";
7914 goto cp0_unimplemented
;
7917 case CP0_REGISTER_17
:
7920 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7921 register_name
= "LLAddr";
7924 CP0_CHECK(ctx
->mrp
);
7925 gen_helper_mtc0_maar(cpu_env
, arg
);
7926 register_name
= "MAAR";
7929 CP0_CHECK(ctx
->mrp
);
7930 gen_helper_mtc0_maari(cpu_env
, arg
);
7931 register_name
= "MAARI";
7934 goto cp0_unimplemented
;
7937 case CP0_REGISTER_18
:
7947 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7948 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7949 register_name
= "WatchLo";
7952 goto cp0_unimplemented
;
7955 case CP0_REGISTER_19
:
7965 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7966 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7967 register_name
= "WatchHi";
7970 goto cp0_unimplemented
;
7973 case CP0_REGISTER_20
:
7976 #if defined(TARGET_MIPS64)
7977 check_insn(ctx
, ISA_MIPS3
);
7978 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7979 register_name
= "XContext";
7983 goto cp0_unimplemented
;
7986 case CP0_REGISTER_21
:
7987 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7988 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7991 gen_helper_mtc0_framemask(cpu_env
, arg
);
7992 register_name
= "Framemask";
7995 goto cp0_unimplemented
;
7998 case CP0_REGISTER_22
:
8000 register_name
= "Diagnostic"; /* implementation dependent */
8002 case CP0_REGISTER_23
:
8005 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8006 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8007 gen_save_pc(ctx
->base
.pc_next
+ 4);
8008 ctx
->base
.is_jmp
= DISAS_EXIT
;
8009 register_name
= "Debug";
8012 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8013 register_name
= "TraceControl";
8014 /* Stop translation as we may have switched the execution mode */
8015 ctx
->base
.is_jmp
= DISAS_STOP
;
8016 goto cp0_unimplemented
;
8018 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8019 register_name
= "TraceControl2";
8020 /* Stop translation as we may have switched the execution mode */
8021 ctx
->base
.is_jmp
= DISAS_STOP
;
8022 goto cp0_unimplemented
;
8024 /* Stop translation as we may have switched the execution mode */
8025 ctx
->base
.is_jmp
= DISAS_STOP
;
8026 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8027 register_name
= "UserTraceData";
8028 /* Stop translation as we may have switched the execution mode */
8029 ctx
->base
.is_jmp
= DISAS_STOP
;
8030 goto cp0_unimplemented
;
8032 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8033 /* Stop translation as we may have switched the execution mode */
8034 ctx
->base
.is_jmp
= DISAS_STOP
;
8035 register_name
= "TraceBPC";
8036 goto cp0_unimplemented
;
8038 goto cp0_unimplemented
;
8041 case CP0_REGISTER_24
:
8045 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8046 register_name
= "DEPC";
8049 goto cp0_unimplemented
;
8052 case CP0_REGISTER_25
:
8055 gen_helper_mtc0_performance0(cpu_env
, arg
);
8056 register_name
= "Performance0";
8059 // gen_helper_mtc0_performance1(arg);
8060 register_name
= "Performance1";
8061 goto cp0_unimplemented
;
8063 // gen_helper_mtc0_performance2(arg);
8064 register_name
= "Performance2";
8065 goto cp0_unimplemented
;
8067 // gen_helper_mtc0_performance3(arg);
8068 register_name
= "Performance3";
8069 goto cp0_unimplemented
;
8071 // gen_helper_mtc0_performance4(arg);
8072 register_name
= "Performance4";
8073 goto cp0_unimplemented
;
8075 // gen_helper_mtc0_performance5(arg);
8076 register_name
= "Performance5";
8077 goto cp0_unimplemented
;
8079 // gen_helper_mtc0_performance6(arg);
8080 register_name
= "Performance6";
8081 goto cp0_unimplemented
;
8083 // gen_helper_mtc0_performance7(arg);
8084 register_name
= "Performance7";
8085 goto cp0_unimplemented
;
8087 goto cp0_unimplemented
;
8090 case CP0_REGISTER_26
:
8093 gen_helper_mtc0_errctl(cpu_env
, arg
);
8094 ctx
->base
.is_jmp
= DISAS_STOP
;
8095 register_name
= "ErrCtl";
8098 goto cp0_unimplemented
;
8101 case CP0_REGISTER_27
:
8108 register_name
= "CacheErr";
8111 goto cp0_unimplemented
;
8114 case CP0_REGISTER_28
:
8120 gen_helper_mtc0_taglo(cpu_env
, arg
);
8121 register_name
= "TagLo";
8127 gen_helper_mtc0_datalo(cpu_env
, arg
);
8128 register_name
= "DataLo";
8131 goto cp0_unimplemented
;
8134 case CP0_REGISTER_29
:
8140 gen_helper_mtc0_taghi(cpu_env
, arg
);
8141 register_name
= "TagHi";
8147 gen_helper_mtc0_datahi(cpu_env
, arg
);
8148 register_name
= "DataHi";
8151 register_name
= "invalid sel";
8152 goto cp0_unimplemented
;
8155 case CP0_REGISTER_30
:
8158 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8159 register_name
= "ErrorEPC";
8162 goto cp0_unimplemented
;
8165 case CP0_REGISTER_31
:
8169 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8170 register_name
= "DESAVE";
8178 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8179 tcg_gen_st_tl(arg
, cpu_env
,
8180 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8181 register_name
= "KScratch";
8184 goto cp0_unimplemented
;
8188 goto cp0_unimplemented
;
    trace_mips_translate_c0("mtc0", register_name, reg, sel);

    /* For simplicity assume that all writes can cause interrupts. */
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        /* DISAS_STOP isn't sufficient, we need to ensure we break out of
         * translated code to check for pending interrupts. */
        gen_save_pc(ctx->base.pc_next + 4);
        ctx->base.is_jmp = DISAS_EXIT;
    qemu_log_mask(LOG_UNIMP, "mtc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
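/*
 * Illustrative sketch (not from the original file; kept out of the build with
 * #if 0): the tail of gen_mtc0() above pessimistically treats every CP0 write
 * as a potential interrupt source when icount is in use: it saves the PC of
 * the following instruction and requests a full exit from the translated
 * block, because DISAS_STOP alone would not guarantee that pending interrupts
 * are re-checked. The enum and helper below are hypothetical names that only
 * summarise that decision; they are not a real QEMU API.
 */
#if 0
enum example_tb_end { EXAMPLE_TB_CONTINUE, EXAMPLE_TB_STOP, EXAMPLE_TB_EXIT };

static enum example_tb_end
example_end_of_mtc0(int icount_enabled, enum example_tb_end from_case)
{
    /* Under icount, always leave the block; otherwise keep the per-register
       decision already made in the switch above (e.g. DISAS_STOP). */
    return icount_enabled ? EXAMPLE_TB_EXIT : from_case;
}
#endif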
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    check_insn(ctx, ISA_MIPS64);
8216 case CP0_REGISTER_00
:
8219 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8220 register_name
= "Index";
8223 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8224 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8225 register_name
= "MVPControl";
8228 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8229 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8230 register_name
= "MVPConf0";
8233 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8234 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8235 register_name
= "MVPConf1";
8239 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8240 register_name
= "VPControl";
8243 goto cp0_unimplemented
;
8246 case CP0_REGISTER_01
:
8249 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8250 gen_helper_mfc0_random(arg
, cpu_env
);
8251 register_name
= "Random";
8254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8255 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8256 register_name
= "VPEControl";
8259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8261 register_name
= "VPEConf0";
8264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8265 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8266 register_name
= "VPEConf1";
8269 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8270 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8271 register_name
= "YQMask";
8274 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8275 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8276 register_name
= "VPESchedule";
8279 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8280 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8281 register_name
= "VPEScheFBack";
8284 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8285 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8286 register_name
= "VPEOpt";
8289 goto cp0_unimplemented
;
8292 case CP0_REGISTER_02
:
8295 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8296 register_name
= "EntryLo0";
8299 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8300 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8301 register_name
= "TCStatus";
8304 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8305 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8306 register_name
= "TCBind";
8309 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8310 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8311 register_name
= "TCRestart";
8314 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8315 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8316 register_name
= "TCHalt";
8319 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8320 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8321 register_name
= "TCContext";
8324 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8325 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8326 register_name
= "TCSchedule";
8329 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8330 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8331 register_name
= "TCScheFBack";
8334 goto cp0_unimplemented
;
8337 case CP0_REGISTER_03
:
8340 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8341 register_name
= "EntryLo1";
8345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8346 register_name
= "GlobalNumber";
8349 goto cp0_unimplemented
;
8352 case CP0_REGISTER_04
:
8355 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8356 register_name
= "Context";
8359 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8360 register_name
= "ContextConfig";
8361 goto cp0_unimplemented
;
8363 CP0_CHECK(ctx
->ulri
);
8364 tcg_gen_ld_tl(arg
, cpu_env
,
8365 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8366 register_name
= "UserLocal";
8369 goto cp0_unimplemented
;
8372 case CP0_REGISTER_05
:
8375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8376 register_name
= "PageMask";
8379 check_insn(ctx
, ISA_MIPS32R2
);
8380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8381 register_name
= "PageGrain";
8385 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8386 register_name
= "SegCtl0";
8390 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8391 register_name
= "SegCtl1";
8395 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8396 register_name
= "SegCtl2";
8400 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8401 register_name
= "PWBase";
8405 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8406 register_name
= "PWField";
8410 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8411 register_name
= "PWSize";
8414 goto cp0_unimplemented
;
8417 case CP0_REGISTER_06
:
8420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8421 register_name
= "Wired";
8424 check_insn(ctx
, ISA_MIPS32R2
);
8425 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8426 register_name
= "SRSConf0";
8429 check_insn(ctx
, ISA_MIPS32R2
);
8430 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8431 register_name
= "SRSConf1";
8434 check_insn(ctx
, ISA_MIPS32R2
);
8435 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8436 register_name
= "SRSConf2";
8439 check_insn(ctx
, ISA_MIPS32R2
);
8440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8441 register_name
= "SRSConf3";
8444 check_insn(ctx
, ISA_MIPS32R2
);
8445 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8446 register_name
= "SRSConf4";
8450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8451 register_name
= "PWCtl";
8454 goto cp0_unimplemented
;
8457 case CP0_REGISTER_07
:
8460 check_insn(ctx
, ISA_MIPS32R2
);
8461 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8462 register_name
= "HWREna";
8465 goto cp0_unimplemented
;
8468 case CP0_REGISTER_08
:
8471 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8472 register_name
= "BadVAddr";
8476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8477 register_name
= "BadInstr";
8481 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8482 register_name
= "BadInstrP";
8486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8487 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8488 register_name
= "BadInstrX";
8491 goto cp0_unimplemented
;
8494 case CP0_REGISTER_09
:
8497 /* Mark as an IO operation because we read the time. */
8498 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8501 gen_helper_mfc0_count(arg
, cpu_env
);
8502 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8505 /* Break the TB to be able to take timer interrupts immediately
8506 after reading count. DISAS_STOP isn't sufficient, we need to
8507 ensure we break completely out of translated code. */
8508 gen_save_pc(ctx
->base
.pc_next
+ 4);
8509 ctx
->base
.is_jmp
= DISAS_EXIT
;
8510 register_name
= "Count";
8513 CP0_CHECK(ctx
->saar
);
8514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8515 register_name
= "SAARI";
8518 CP0_CHECK(ctx
->saar
);
8519 gen_helper_dmfc0_saar(arg
, cpu_env
);
8520 register_name
= "SAAR";
8523 goto cp0_unimplemented
;
8526 case CP0_REGISTER_10
:
8529 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8530 register_name
= "EntryHi";
8533 goto cp0_unimplemented
;
8536 case CP0_REGISTER_11
:
8539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8540 register_name
= "Compare";
8542 /* 6,7 are implementation dependent */
8544 goto cp0_unimplemented
;
8547 case CP0_REGISTER_12
:
8550 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8551 register_name
= "Status";
8554 check_insn(ctx
, ISA_MIPS32R2
);
8555 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8556 register_name
= "IntCtl";
8559 check_insn(ctx
, ISA_MIPS32R2
);
8560 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8561 register_name
= "SRSCtl";
8564 check_insn(ctx
, ISA_MIPS32R2
);
8565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8566 register_name
= "SRSMap";
8569 goto cp0_unimplemented
;
8572 case CP0_REGISTER_13
:
8575 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8576 register_name
= "Cause";
8579 goto cp0_unimplemented
;
8582 case CP0_REGISTER_14
:
8585 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8586 register_name
= "EPC";
8589 goto cp0_unimplemented
;
8592 case CP0_REGISTER_15
:
8595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8596 register_name
= "PRid";
8599 check_insn(ctx
, ISA_MIPS32R2
);
8600 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8601 register_name
= "EBase";
8604 check_insn(ctx
, ISA_MIPS32R2
);
8605 CP0_CHECK(ctx
->cmgcr
);
8606 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8607 register_name
= "CMGCRBase";
8610 goto cp0_unimplemented
;
8613 case CP0_REGISTER_16
:
8616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8617 register_name
= "Config";
8620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8621 register_name
= "Config1";
8624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8625 register_name
= "Config2";
8628 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8629 register_name
= "Config3";
8632 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8633 register_name
= "Config4";
8636 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8637 register_name
= "Config5";
8639 /* 6,7 are implementation dependent */
8641 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8642 register_name
= "Config6";
8645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8646 register_name
= "Config7";
8649 goto cp0_unimplemented
;
8652 case CP0_REGISTER_17
:
8655 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8656 register_name
= "LLAddr";
8659 CP0_CHECK(ctx
->mrp
);
8660 gen_helper_dmfc0_maar(arg
, cpu_env
);
8661 register_name
= "MAAR";
8664 CP0_CHECK(ctx
->mrp
);
8665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8666 register_name
= "MAARI";
8669 goto cp0_unimplemented
;
8672 case CP0_REGISTER_18
:
8682 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8683 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8684 register_name
= "WatchLo";
8687 goto cp0_unimplemented
;
8690 case CP0_REGISTER_19
:
8700 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8701 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8702 register_name
= "WatchHi";
8705 goto cp0_unimplemented
;
8708 case CP0_REGISTER_20
:
8711 check_insn(ctx
, ISA_MIPS3
);
8712 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8713 register_name
= "XContext";
8716 goto cp0_unimplemented
;
8719 case CP0_REGISTER_21
:
8720 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8721 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8724 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8725 register_name
= "Framemask";
8728 goto cp0_unimplemented
;
    case CP0_REGISTER_22:
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        register_name = "Diagnostic"; /* implementation dependent */
8735 case CP0_REGISTER_23
:
8738 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8739 register_name
= "Debug";
8742 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8743 register_name
= "TraceControl";
8744 goto cp0_unimplemented
;
8746 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8747 register_name
= "TraceControl2";
8748 goto cp0_unimplemented
;
8750 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8751 register_name
= "UserTraceData";
8752 goto cp0_unimplemented
;
8754 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8755 register_name
= "TraceBPC";
8756 goto cp0_unimplemented
;
8758 goto cp0_unimplemented
;
8761 case CP0_REGISTER_24
:
8765 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8766 register_name
= "DEPC";
8769 goto cp0_unimplemented
;
8772 case CP0_REGISTER_25
:
8775 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8776 register_name
= "Performance0";
8779 // gen_helper_dmfc0_performance1(arg);
8780 register_name
= "Performance1";
8781 goto cp0_unimplemented
;
8783 // gen_helper_dmfc0_performance2(arg);
8784 register_name
= "Performance2";
8785 goto cp0_unimplemented
;
8787 // gen_helper_dmfc0_performance3(arg);
8788 register_name
= "Performance3";
8789 goto cp0_unimplemented
;
8791 // gen_helper_dmfc0_performance4(arg);
8792 register_name
= "Performance4";
8793 goto cp0_unimplemented
;
8795 // gen_helper_dmfc0_performance5(arg);
8796 register_name
= "Performance5";
8797 goto cp0_unimplemented
;
8799 // gen_helper_dmfc0_performance6(arg);
8800 register_name
= "Performance6";
8801 goto cp0_unimplemented
;
8803 // gen_helper_dmfc0_performance7(arg);
8804 register_name
= "Performance7";
8805 goto cp0_unimplemented
;
8807 goto cp0_unimplemented
;
8810 case CP0_REGISTER_26
:
8813 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8814 register_name
= "ErrCtl";
8817 goto cp0_unimplemented
;
8820 case CP0_REGISTER_27
:
8827 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8828 register_name
= "CacheErr";
8831 goto cp0_unimplemented
;
8834 case CP0_REGISTER_28
:
8840 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8841 register_name
= "TagLo";
8847 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8848 register_name
= "DataLo";
8851 goto cp0_unimplemented
;
8854 case CP0_REGISTER_29
:
8860 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8861 register_name
= "TagHi";
8867 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8868 register_name
= "DataHi";
8871 goto cp0_unimplemented
;
8874 case CP0_REGISTER_30
:
8877 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8878 register_name
= "ErrorEPC";
8881 goto cp0_unimplemented
;
8884 case CP0_REGISTER_31
:
8888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8889 register_name
= "DESAVE";
8897 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8898 tcg_gen_ld_tl(arg
, cpu_env
,
8899 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8900 register_name
= "KScratch";
8903 goto cp0_unimplemented
;
8907 goto cp0_unimplemented
;
    trace_mips_translate_c0("dmfc0", register_name, reg, sel);
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
}

static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    check_insn(ctx, ISA_MIPS64);

    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
8930 case CP0_REGISTER_00
:
8933 gen_helper_mtc0_index(cpu_env
, arg
);
8934 register_name
= "Index";
8937 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8938 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8939 register_name
= "MVPControl";
8942 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8944 register_name
= "MVPConf0";
8947 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8949 register_name
= "MVPConf1";
8954 register_name
= "VPControl";
8957 goto cp0_unimplemented
;
8960 case CP0_REGISTER_01
:
8964 register_name
= "Random";
8967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8968 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8969 register_name
= "VPEControl";
8972 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8973 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8974 register_name
= "VPEConf0";
8977 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8978 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8979 register_name
= "VPEConf1";
8982 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8983 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8984 register_name
= "YQMask";
8987 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8988 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8989 register_name
= "VPESchedule";
8992 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8993 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8994 register_name
= "VPEScheFBack";
8997 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8998 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8999 register_name
= "VPEOpt";
9002 goto cp0_unimplemented
;
9005 case CP0_REGISTER_02
:
9008 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9009 register_name
= "EntryLo0";
9012 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9013 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9014 register_name
= "TCStatus";
9017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9018 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9019 register_name
= "TCBind";
9022 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9023 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9024 register_name
= "TCRestart";
9027 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9028 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9029 register_name
= "TCHalt";
9032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9033 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9034 register_name
= "TCContext";
9037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9038 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9039 register_name
= "TCSchedule";
9042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9043 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9044 register_name
= "TCScheFBack";
9047 goto cp0_unimplemented
;
9050 case CP0_REGISTER_03
:
9053 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9054 register_name
= "EntryLo1";
9059 register_name
= "GlobalNumber";
9062 goto cp0_unimplemented
;
9065 case CP0_REGISTER_04
:
9068 gen_helper_mtc0_context(cpu_env
, arg
);
9069 register_name
= "Context";
9072 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9073 register_name
= "ContextConfig";
9074 goto cp0_unimplemented
;
9076 CP0_CHECK(ctx
->ulri
);
9077 tcg_gen_st_tl(arg
, cpu_env
,
9078 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9079 register_name
= "UserLocal";
9082 goto cp0_unimplemented
;
9085 case CP0_REGISTER_05
:
9088 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9089 register_name
= "PageMask";
9092 check_insn(ctx
, ISA_MIPS32R2
);
9093 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9094 register_name
= "PageGrain";
9098 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9099 register_name
= "SegCtl0";
9103 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9104 register_name
= "SegCtl1";
9108 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9109 register_name
= "SegCtl2";
9113 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9114 register_name
= "PWBase";
9118 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9119 register_name
= "PWField";
9123 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9124 register_name
= "PWSize";
9127 goto cp0_unimplemented
;
9130 case CP0_REGISTER_06
:
9133 gen_helper_mtc0_wired(cpu_env
, arg
);
9134 register_name
= "Wired";
9137 check_insn(ctx
, ISA_MIPS32R2
);
9138 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9139 register_name
= "SRSConf0";
9142 check_insn(ctx
, ISA_MIPS32R2
);
9143 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9144 register_name
= "SRSConf1";
9147 check_insn(ctx
, ISA_MIPS32R2
);
9148 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9149 register_name
= "SRSConf2";
9152 check_insn(ctx
, ISA_MIPS32R2
);
9153 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9154 register_name
= "SRSConf3";
9157 check_insn(ctx
, ISA_MIPS32R2
);
9158 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9159 register_name
= "SRSConf4";
9163 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9164 register_name
= "PWCtl";
9167 goto cp0_unimplemented
;
9170 case CP0_REGISTER_07
:
9173 check_insn(ctx
, ISA_MIPS32R2
);
9174 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9175 ctx
->base
.is_jmp
= DISAS_STOP
;
9176 register_name
= "HWREna";
9179 goto cp0_unimplemented
;
9182 case CP0_REGISTER_08
:
9186 register_name
= "BadVAddr";
9190 register_name
= "BadInstr";
9194 register_name
= "BadInstrP";
9198 register_name
= "BadInstrX";
9201 goto cp0_unimplemented
;
9204 case CP0_REGISTER_09
:
9207 gen_helper_mtc0_count(cpu_env
, arg
);
9208 register_name
= "Count";
9211 CP0_CHECK(ctx
->saar
);
9212 gen_helper_mtc0_saari(cpu_env
, arg
);
9213 register_name
= "SAARI";
9216 CP0_CHECK(ctx
->saar
);
9217 gen_helper_mtc0_saar(cpu_env
, arg
);
9218 register_name
= "SAAR";
9221 goto cp0_unimplemented
;
9223 /* Stop translation as we may have switched the execution mode */
9224 ctx
->base
.is_jmp
= DISAS_STOP
;
9226 case CP0_REGISTER_10
:
9229 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9230 register_name
= "EntryHi";
9233 goto cp0_unimplemented
;
9236 case CP0_REGISTER_11
:
9239 gen_helper_mtc0_compare(cpu_env
, arg
);
9240 register_name
= "Compare";
9242 /* 6,7 are implementation dependent */
9244 goto cp0_unimplemented
;
9246 /* Stop translation as we may have switched the execution mode */
9247 ctx
->base
.is_jmp
= DISAS_STOP
;
9249 case CP0_REGISTER_12
:
9252 save_cpu_state(ctx
, 1);
9253 gen_helper_mtc0_status(cpu_env
, arg
);
9254 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9255 gen_save_pc(ctx
->base
.pc_next
+ 4);
9256 ctx
->base
.is_jmp
= DISAS_EXIT
;
9257 register_name
= "Status";
9260 check_insn(ctx
, ISA_MIPS32R2
);
9261 gen_helper_mtc0_intctl(cpu_env
, arg
);
9262 /* Stop translation as we may have switched the execution mode */
9263 ctx
->base
.is_jmp
= DISAS_STOP
;
9264 register_name
= "IntCtl";
9267 check_insn(ctx
, ISA_MIPS32R2
);
9268 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9269 /* Stop translation as we may have switched the execution mode */
9270 ctx
->base
.is_jmp
= DISAS_STOP
;
9271 register_name
= "SRSCtl";
9274 check_insn(ctx
, ISA_MIPS32R2
);
9275 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9276 /* Stop translation as we may have switched the execution mode */
9277 ctx
->base
.is_jmp
= DISAS_STOP
;
9278 register_name
= "SRSMap";
9281 goto cp0_unimplemented
;
9284 case CP0_REGISTER_13
:
9287 save_cpu_state(ctx
, 1);
9288 gen_helper_mtc0_cause(cpu_env
, arg
);
9289 /* Stop translation as we may have triggered an interrupt.
9290 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9291 * translated code to check for pending interrupts. */
9292 gen_save_pc(ctx
->base
.pc_next
+ 4);
9293 ctx
->base
.is_jmp
= DISAS_EXIT
;
9294 register_name
= "Cause";
9297 goto cp0_unimplemented
;
9300 case CP0_REGISTER_14
:
9303 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9304 register_name
= "EPC";
9307 goto cp0_unimplemented
;
9310 case CP0_REGISTER_15
:
9314 register_name
= "PRid";
9317 check_insn(ctx
, ISA_MIPS32R2
);
9318 gen_helper_mtc0_ebase(cpu_env
, arg
);
9319 register_name
= "EBase";
9322 goto cp0_unimplemented
;
9325 case CP0_REGISTER_16
:
9328 gen_helper_mtc0_config0(cpu_env
, arg
);
9329 register_name
= "Config";
9330 /* Stop translation as we may have switched the execution mode */
9331 ctx
->base
.is_jmp
= DISAS_STOP
;
9334 /* ignored, read only */
9335 register_name
= "Config1";
9338 gen_helper_mtc0_config2(cpu_env
, arg
);
9339 register_name
= "Config2";
9340 /* Stop translation as we may have switched the execution mode */
9341 ctx
->base
.is_jmp
= DISAS_STOP
;
9344 gen_helper_mtc0_config3(cpu_env
, arg
);
9345 register_name
= "Config3";
9346 /* Stop translation as we may have switched the execution mode */
9347 ctx
->base
.is_jmp
= DISAS_STOP
;
9350 /* currently ignored */
9351 register_name
= "Config4";
9354 gen_helper_mtc0_config5(cpu_env
, arg
);
9355 register_name
= "Config5";
9356 /* Stop translation as we may have switched the execution mode */
9357 ctx
->base
.is_jmp
= DISAS_STOP
;
9359 /* 6,7 are implementation dependent */
9361 register_name
= "Invalid config selector";
9362 goto cp0_unimplemented
;
9365 case CP0_REGISTER_17
:
9368 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9369 register_name
= "LLAddr";
9372 CP0_CHECK(ctx
->mrp
);
9373 gen_helper_mtc0_maar(cpu_env
, arg
);
9374 register_name
= "MAAR";
9377 CP0_CHECK(ctx
->mrp
);
9378 gen_helper_mtc0_maari(cpu_env
, arg
);
9379 register_name
= "MAARI";
9382 goto cp0_unimplemented
;
9385 case CP0_REGISTER_18
:
9395 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9396 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9397 register_name
= "WatchLo";
9400 goto cp0_unimplemented
;
9403 case CP0_REGISTER_19
:
9413 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9414 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9415 register_name
= "WatchHi";
9418 goto cp0_unimplemented
;
9421 case CP0_REGISTER_20
:
9424 check_insn(ctx
, ISA_MIPS3
);
9425 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9426 register_name
= "XContext";
9429 goto cp0_unimplemented
;
9432 case CP0_REGISTER_21
:
9433 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9434 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9437 gen_helper_mtc0_framemask(cpu_env
, arg
);
9438 register_name
= "Framemask";
9441 goto cp0_unimplemented
;
9444 case CP0_REGISTER_22
:
9446 register_name
= "Diagnostic"; /* implementation dependent */
9448 case CP0_REGISTER_23
:
9451 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9452 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9453 gen_save_pc(ctx
->base
.pc_next
+ 4);
9454 ctx
->base
.is_jmp
= DISAS_EXIT
;
9455 register_name
= "Debug";
9458 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9459 /* Stop translation as we may have switched the execution mode */
9460 ctx
->base
.is_jmp
= DISAS_STOP
;
9461 register_name
= "TraceControl";
9462 goto cp0_unimplemented
;
9464 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9465 /* Stop translation as we may have switched the execution mode */
9466 ctx
->base
.is_jmp
= DISAS_STOP
;
9467 register_name
= "TraceControl2";
9468 goto cp0_unimplemented
;
9470 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9471 /* Stop translation as we may have switched the execution mode */
9472 ctx
->base
.is_jmp
= DISAS_STOP
;
9473 register_name
= "UserTraceData";
9474 goto cp0_unimplemented
;
9476 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9477 /* Stop translation as we may have switched the execution mode */
9478 ctx
->base
.is_jmp
= DISAS_STOP
;
9479 register_name
= "TraceBPC";
9480 goto cp0_unimplemented
;
9482 goto cp0_unimplemented
;
9485 case CP0_REGISTER_24
:
9489 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9490 register_name
= "DEPC";
9493 goto cp0_unimplemented
;
9496 case CP0_REGISTER_25
:
9499 gen_helper_mtc0_performance0(cpu_env
, arg
);
9500 register_name
= "Performance0";
9503 // gen_helper_mtc0_performance1(cpu_env, arg);
9504 register_name
= "Performance1";
9505 goto cp0_unimplemented
;
9507 // gen_helper_mtc0_performance2(cpu_env, arg);
9508 register_name
= "Performance2";
9509 goto cp0_unimplemented
;
9511 // gen_helper_mtc0_performance3(cpu_env, arg);
9512 register_name
= "Performance3";
9513 goto cp0_unimplemented
;
9515 // gen_helper_mtc0_performance4(cpu_env, arg);
9516 register_name
= "Performance4";
9517 goto cp0_unimplemented
;
9519 // gen_helper_mtc0_performance5(cpu_env, arg);
9520 register_name
= "Performance5";
9521 goto cp0_unimplemented
;
9523 // gen_helper_mtc0_performance6(cpu_env, arg);
9524 register_name
= "Performance6";
9525 goto cp0_unimplemented
;
9527 // gen_helper_mtc0_performance7(cpu_env, arg);
9528 register_name
= "Performance7";
9529 goto cp0_unimplemented
;
9531 goto cp0_unimplemented
;
9534 case CP0_REGISTER_26
:
9537 gen_helper_mtc0_errctl(cpu_env
, arg
);
9538 ctx
->base
.is_jmp
= DISAS_STOP
;
9539 register_name
= "ErrCtl";
9542 goto cp0_unimplemented
;
9545 case CP0_REGISTER_27
:
9552 register_name
= "CacheErr";
9555 goto cp0_unimplemented
;
9558 case CP0_REGISTER_28
:
9564 gen_helper_mtc0_taglo(cpu_env
, arg
);
9565 register_name
= "TagLo";
9571 gen_helper_mtc0_datalo(cpu_env
, arg
);
9572 register_name
= "DataLo";
9575 goto cp0_unimplemented
;
9578 case CP0_REGISTER_29
:
9584 gen_helper_mtc0_taghi(cpu_env
, arg
);
9585 register_name
= "TagHi";
9591 gen_helper_mtc0_datahi(cpu_env
, arg
);
9592 register_name
= "DataHi";
9595 register_name
= "invalid sel";
9596 goto cp0_unimplemented
;
9599 case CP0_REGISTER_30
:
9602 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9603 register_name
= "ErrorEPC";
9606 goto cp0_unimplemented
;
9609 case CP0_REGISTER_31
:
9613 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9614 register_name
= "DESAVE";
9622 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9623 tcg_gen_st_tl(arg
, cpu_env
,
9624 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9625 register_name
= "KScratch";
9628 goto cp0_unimplemented
;
9632 goto cp0_unimplemented
;
9634 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9636 /* For simplicity assume that all writes can cause interrupts. */
9637 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9639 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9640 * translated code to check for pending interrupts. */
9641 gen_save_pc(ctx
->base
.pc_next
+ 4);
9642 ctx
->base
.is_jmp
= DISAS_EXIT
;
9647 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9648 register_name
, reg
, sel
);
9650 #endif /* TARGET_MIPS64 */
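/*
 * gen_mftr: translate MFTR from the MT ASE.  It reads a register belonging
 * to another thread context (TC), selected by CP0_VPEControl.TargTC.  If the
 * target TC is not accessible from the current VPE, t0 is simply loaded with
 * -1; otherwise the u/sel/h fields choose between the target's CP0 registers,
 * its GPRs, the HI/LO/ACX halves of the four DSP accumulators, the DSP
 * control register, and the FPU (COP2 is not implemented).  The value read
 * is finally stored into GPR rd of the current thread.
 */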
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
9670 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9673 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9683 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9686 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9689 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9692 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9695 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9698 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9701 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9704 gen_mfc0(ctx
, t0
, rt
, sel
);
9711 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9714 gen_mfc0(ctx
, t0
, rt
, sel
);
9720 gen_helper_mftc0_status(t0
, cpu_env
);
9723 gen_mfc0(ctx
, t0
, rt
, sel
);
9729 gen_helper_mftc0_cause(t0
, cpu_env
);
9739 gen_helper_mftc0_epc(t0
, cpu_env
);
9749 gen_helper_mftc0_ebase(t0
, cpu_env
);
9766 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9776 gen_helper_mftc0_debug(t0
, cpu_env
);
9779 gen_mfc0(ctx
, t0
, rt
, sel
);
9784 gen_mfc0(ctx
, t0
, rt
, sel
);
9786 } else switch (sel
) {
9787 /* GPR registers. */
9789 gen_helper_1e0i(mftgpr
, t0
, rt
);
9791 /* Auxiliary CPU registers */
9795 gen_helper_1e0i(mftlo
, t0
, 0);
9798 gen_helper_1e0i(mfthi
, t0
, 0);
9801 gen_helper_1e0i(mftacx
, t0
, 0);
9804 gen_helper_1e0i(mftlo
, t0
, 1);
9807 gen_helper_1e0i(mfthi
, t0
, 1);
9810 gen_helper_1e0i(mftacx
, t0
, 1);
9813 gen_helper_1e0i(mftlo
, t0
, 2);
9816 gen_helper_1e0i(mfthi
, t0
, 2);
9819 gen_helper_1e0i(mftacx
, t0
, 2);
9822 gen_helper_1e0i(mftlo
, t0
, 3);
9825 gen_helper_1e0i(mfthi
, t0
, 3);
9828 gen_helper_1e0i(mftacx
, t0
, 3);
9831 gen_helper_mftdsp(t0
, cpu_env
);
9837 /* Floating point (COP1). */
9839 /* XXX: For now we support only a single FPU context. */
9841 TCGv_i32 fp0
= tcg_temp_new_i32();
9843 gen_load_fpr32(ctx
, fp0
, rt
);
9844 tcg_gen_ext_i32_tl(t0
, fp0
);
9845 tcg_temp_free_i32(fp0
);
9847 TCGv_i32 fp0
= tcg_temp_new_i32();
9849 gen_load_fpr32h(ctx
, fp0
, rt
);
9850 tcg_gen_ext_i32_tl(t0
, fp0
);
9851 tcg_temp_free_i32(fp0
);
9855 /* XXX: For now we support only a single FPU context. */
9856 gen_helper_1e0i(cfc1
, t0
, rt
);
9858 /* COP2: Not implemented. */
9865 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9866 gen_store_gpr(t0
, rd
);
9872 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9873 generate_exception_end(ctx
, EXCP_RI
);
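/*
 * gen_mttr: the MT ASE counterpart of gen_mftr.  It moves GPR rt of the
 * current thread into a register of the target TC.  Writes to a TC that is
 * not accessible from the current VPE are treated as no-ops, mirroring the
 * -1 reads above; otherwise the same u/sel/h decoding selects CP0, GPR,
 * HI/LO/ACX, DSP control or FPU destinations via the mttc0_* / mtt* helpers.
 */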
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        /* NOP */;
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        /* NOP */;
9895 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9898 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9908 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9911 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9914 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9917 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9920 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9923 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9926 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9929 gen_mtc0(ctx
, t0
, rd
, sel
);
9936 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9939 gen_mtc0(ctx
, t0
, rd
, sel
);
9945 gen_helper_mttc0_status(cpu_env
, t0
);
9948 gen_mtc0(ctx
, t0
, rd
, sel
);
9954 gen_helper_mttc0_cause(cpu_env
, t0
);
9964 gen_helper_mttc0_ebase(cpu_env
, t0
);
9974 gen_helper_mttc0_debug(cpu_env
, t0
);
9977 gen_mtc0(ctx
, t0
, rd
, sel
);
9982 gen_mtc0(ctx
, t0
, rd
, sel
);
9984 } else switch (sel
) {
9985 /* GPR registers. */
9987 gen_helper_0e1i(mttgpr
, t0
, rd
);
9989 /* Auxiliary CPU registers */
9993 gen_helper_0e1i(mttlo
, t0
, 0);
9996 gen_helper_0e1i(mtthi
, t0
, 0);
9999 gen_helper_0e1i(mttacx
, t0
, 0);
10002 gen_helper_0e1i(mttlo
, t0
, 1);
10005 gen_helper_0e1i(mtthi
, t0
, 1);
10008 gen_helper_0e1i(mttacx
, t0
, 1);
10011 gen_helper_0e1i(mttlo
, t0
, 2);
10014 gen_helper_0e1i(mtthi
, t0
, 2);
10017 gen_helper_0e1i(mttacx
, t0
, 2);
10020 gen_helper_0e1i(mttlo
, t0
, 3);
10023 gen_helper_0e1i(mtthi
, t0
, 3);
10026 gen_helper_0e1i(mttacx
, t0
, 3);
10029 gen_helper_mttdsp(cpu_env
, t0
);
10035 /* Floating point (COP1). */
10037 /* XXX: For now we support only a single FPU context. */
10039 TCGv_i32 fp0
= tcg_temp_new_i32();
10041 tcg_gen_trunc_tl_i32(fp0
, t0
);
10042 gen_store_fpr32(ctx
, fp0
, rd
);
10043 tcg_temp_free_i32(fp0
);
10045 TCGv_i32 fp0
= tcg_temp_new_i32();
10047 tcg_gen_trunc_tl_i32(fp0
, t0
);
10048 gen_store_fpr32h(ctx
, fp0
, rd
);
10049 tcg_temp_free_i32(fp0
);
10053 /* XXX: For now we support only a single FPU context. */
10055 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10057 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10058 tcg_temp_free_i32(fs_tmp
);
10060 /* Stop translation as we may have changed hflags */
10061 ctx
->base
.is_jmp
= DISAS_STOP
;
10063 /* COP2: Not implemented. */
10070 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10076 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10077 generate_exception_end(ctx
, EXCP_RI
);
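/*
 * gen_cp0: top-level dispatcher for privileged coprocessor 0 instructions.
 * It handles the MFC0/MTC0 register moves (plus DMFC0/DMTC0 on TARGET_MIPS64,
 * MFHC0/MTHC0, and the MT ASE MFTR/MTTR), the TLB maintenance instructions
 * (TLBWI, TLBWR, TLBP, TLBR, TLBINV, TLBINVF), ERET/ERETNC, DERET and WAIT.
 * Moves whose destination GPR is r0 are treated as NOPs.
 */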
static void gen_cp0(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                    int rt, int rd)
{
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
10088 /* Treat as NOP. */
10091 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10096 TCGv t0
= tcg_temp_new();
10098 gen_load_gpr(t0
, rt
);
10099 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10104 #if defined(TARGET_MIPS64)
10106 check_insn(ctx
, ISA_MIPS3
);
10108 /* Treat as NOP. */
10111 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10115 check_insn(ctx
, ISA_MIPS3
);
10117 TCGv t0
= tcg_temp_new();
10119 gen_load_gpr(t0
, rt
);
10120 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10129 /* Treat as NOP. */
10132 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10138 TCGv t0
= tcg_temp_new();
10139 gen_load_gpr(t0
, rt
);
10140 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10146 check_cp0_enabled(ctx
);
10148 /* Treat as NOP. */
10151 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10152 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10156 check_cp0_enabled(ctx
);
10157 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10158 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10163 if (!env
->tlb
->helper_tlbwi
)
10165 gen_helper_tlbwi(cpu_env
);
10169 if (ctx
->ie
>= 2) {
10170 if (!env
->tlb
->helper_tlbinv
) {
10173 gen_helper_tlbinv(cpu_env
);
10174 } /* treat as nop if TLBINV not supported */
10178 if (ctx
->ie
>= 2) {
10179 if (!env
->tlb
->helper_tlbinvf
) {
10182 gen_helper_tlbinvf(cpu_env
);
10183 } /* treat as nop if TLBINV not supported */
10187 if (!env
->tlb
->helper_tlbwr
)
10189 gen_helper_tlbwr(cpu_env
);
10193 if (!env
->tlb
->helper_tlbp
)
10195 gen_helper_tlbp(cpu_env
);
10199 if (!env
->tlb
->helper_tlbr
)
10201 gen_helper_tlbr(cpu_env
);
10203 case OPC_ERET
: /* OPC_ERETNC */
10204 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10205 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10208 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10209 if (ctx
->opcode
& (1 << bit_shift
)) {
10212 check_insn(ctx
, ISA_MIPS32R5
);
10213 gen_helper_eretnc(cpu_env
);
10217 check_insn(ctx
, ISA_MIPS2
);
10218 gen_helper_eret(cpu_env
);
10220 ctx
->base
.is_jmp
= DISAS_EXIT
;
10225 check_insn(ctx
, ISA_MIPS32
);
10226 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10227 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10230 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10232 generate_exception_end(ctx
, EXCP_RI
);
10234 gen_helper_deret(cpu_env
);
10235 ctx
->base
.is_jmp
= DISAS_EXIT
;
10240 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10241 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10242 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10245 /* If we get an exception, we want to restart at next instruction */
10246 ctx
->base
.pc_next
+= 4;
10247 save_cpu_state(ctx
, 1);
10248 ctx
->base
.pc_next
-= 4;
10249 gen_helper_wait(cpu_env
);
10250 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10255 generate_exception_end(ctx
, EXCP_RI
);
10258 (void)opn
; /* avoid a compiler warning */
10260 #endif /* !CONFIG_USER_ONLY */
10262 /* CP1 Branches (before delay slot) */
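/*
 * These are the pre-R6 BC1F/BC1T/BC1FL/BC1TL branches, plus the BC1ANY2 and
 * BC1ANY4 variants used with paired-single.  The branch condition is built
 * by shifting the selected condition-code bit(s) out of fpu_fcr31 into the
 * global 'bcond' temporary; the branch itself and its delay slot are then
 * handled by the common branch machinery through ctx->btarget and the
 * MIPS_HFLAG_BC / MIPS_HFLAG_BL hflags.
 */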
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (cc != 0) {
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    }

    btarget = ctx->base.pc_next + 4 + offset;

    switch (op) {
    case OPC_BC1F:
10281 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10282 tcg_gen_not_i32(t0
, t0
);
10283 tcg_gen_andi_i32(t0
, t0
, 1);
10284 tcg_gen_extu_i32_tl(bcond
, t0
);
10287 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10288 tcg_gen_not_i32(t0
, t0
);
10289 tcg_gen_andi_i32(t0
, t0
, 1);
10290 tcg_gen_extu_i32_tl(bcond
, t0
);
10293 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10294 tcg_gen_andi_i32(t0
, t0
, 1);
10295 tcg_gen_extu_i32_tl(bcond
, t0
);
10298 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10299 tcg_gen_andi_i32(t0
, t0
, 1);
10300 tcg_gen_extu_i32_tl(bcond
, t0
);
10302 ctx
->hflags
|= MIPS_HFLAG_BL
;
10306 TCGv_i32 t1
= tcg_temp_new_i32();
10307 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10308 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10309 tcg_gen_nand_i32(t0
, t0
, t1
);
10310 tcg_temp_free_i32(t1
);
10311 tcg_gen_andi_i32(t0
, t0
, 1);
10312 tcg_gen_extu_i32_tl(bcond
, t0
);
10317 TCGv_i32 t1
= tcg_temp_new_i32();
10318 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10319 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10320 tcg_gen_or_i32(t0
, t0
, t1
);
10321 tcg_temp_free_i32(t1
);
10322 tcg_gen_andi_i32(t0
, t0
, 1);
10323 tcg_gen_extu_i32_tl(bcond
, t0
);
10328 TCGv_i32 t1
= tcg_temp_new_i32();
10329 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10330 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10331 tcg_gen_and_i32(t0
, t0
, t1
);
10332 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10333 tcg_gen_and_i32(t0
, t0
, t1
);
10334 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10335 tcg_gen_nand_i32(t0
, t0
, t1
);
10336 tcg_temp_free_i32(t1
);
10337 tcg_gen_andi_i32(t0
, t0
, 1);
10338 tcg_gen_extu_i32_tl(bcond
, t0
);
10343 TCGv_i32 t1
= tcg_temp_new_i32();
10344 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10345 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10346 tcg_gen_or_i32(t0
, t0
, t1
);
10347 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10348 tcg_gen_or_i32(t0
, t0
, t1
);
10349 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10350 tcg_gen_or_i32(t0
, t0
, t1
);
10351 tcg_temp_free_i32(t1
);
10352 tcg_gen_andi_i32(t0
, t0
, 1);
10353 tcg_gen_extu_i32_tl(bcond
, t0
);
10356 ctx
->hflags
|= MIPS_HFLAG_BC
;
10359 MIPS_INVAL("cp1 cond branch");
10360 generate_exception_end(ctx
, EXCP_RI
);
10363 ctx
->btarget
= btarget
;
10364 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10366 tcg_temp_free_i32(t0
);
10369 /* R6 CP1 Branches */
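/*
 * R6 replaces the condition-code based FP branches with BC1EQZ/BC1NEZ,
 * which test bit 0 of FPR ft directly: the bit is extracted with an AND,
 * optionally inverted with an XOR for the EQZ form, and copied into
 * 'bcond'.  These branches are not allowed in delay or forbidden slots.
 */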
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
10393 tcg_gen_xori_i64(t0
, t0
, 1);
10394 ctx
->hflags
|= MIPS_HFLAG_BC
;
10397 /* t0 already set */
10398 ctx
->hflags
|= MIPS_HFLAG_BC
;
10401 MIPS_INVAL("cp1 cond branch");
10402 generate_exception_end(ctx
, EXCP_RI
);
10406 tcg_gen_trunc_i64_tl(bcond
, t0
);
10408 ctx
->btarget
= btarget
;
10410 switch (delayslot_size
) {
10412 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10415 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10420 tcg_temp_free_i64(t0
);
10423 /* Coprocessor 1 (FPU) */
10425 #define FOP(func, fmt) (((fmt) << 21) | (func))
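/*
 * FOP() mirrors the fixed fields of a COP1 arithmetic instruction with the
 * major opcode stripped: the fmt field occupies bits 25..21 and the function
 * field bits 5..0.  With the architectural fmt codes (FMT_S = 16, FMT_D = 17,
 * FMT_W = 20, FMT_L = 21, FMT_PS = 22) this gives, for example:
 *
 *     OPC_ADD_D = FOP(0, FMT_D) = (17 << 21) | 0 = 0x02200000
 */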
10428 OPC_ADD_S
= FOP(0, FMT_S
),
10429 OPC_SUB_S
= FOP(1, FMT_S
),
10430 OPC_MUL_S
= FOP(2, FMT_S
),
10431 OPC_DIV_S
= FOP(3, FMT_S
),
10432 OPC_SQRT_S
= FOP(4, FMT_S
),
10433 OPC_ABS_S
= FOP(5, FMT_S
),
10434 OPC_MOV_S
= FOP(6, FMT_S
),
10435 OPC_NEG_S
= FOP(7, FMT_S
),
10436 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10437 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10438 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10439 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10440 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10441 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10442 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10443 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10444 OPC_SEL_S
= FOP(16, FMT_S
),
10445 OPC_MOVCF_S
= FOP(17, FMT_S
),
10446 OPC_MOVZ_S
= FOP(18, FMT_S
),
10447 OPC_MOVN_S
= FOP(19, FMT_S
),
10448 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10449 OPC_RECIP_S
= FOP(21, FMT_S
),
10450 OPC_RSQRT_S
= FOP(22, FMT_S
),
10451 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10452 OPC_MADDF_S
= FOP(24, FMT_S
),
10453 OPC_MSUBF_S
= FOP(25, FMT_S
),
10454 OPC_RINT_S
= FOP(26, FMT_S
),
10455 OPC_CLASS_S
= FOP(27, FMT_S
),
10456 OPC_MIN_S
= FOP(28, FMT_S
),
10457 OPC_RECIP2_S
= FOP(28, FMT_S
),
10458 OPC_MINA_S
= FOP(29, FMT_S
),
10459 OPC_RECIP1_S
= FOP(29, FMT_S
),
10460 OPC_MAX_S
= FOP(30, FMT_S
),
10461 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10462 OPC_MAXA_S
= FOP(31, FMT_S
),
10463 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10464 OPC_CVT_D_S
= FOP(33, FMT_S
),
10465 OPC_CVT_W_S
= FOP(36, FMT_S
),
10466 OPC_CVT_L_S
= FOP(37, FMT_S
),
10467 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10468 OPC_CMP_F_S
= FOP (48, FMT_S
),
10469 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10470 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10471 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10472 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10473 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10474 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10475 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10476 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10477 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10478 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10479 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10480 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10481 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10482 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10483 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10485 OPC_ADD_D
= FOP(0, FMT_D
),
10486 OPC_SUB_D
= FOP(1, FMT_D
),
10487 OPC_MUL_D
= FOP(2, FMT_D
),
10488 OPC_DIV_D
= FOP(3, FMT_D
),
10489 OPC_SQRT_D
= FOP(4, FMT_D
),
10490 OPC_ABS_D
= FOP(5, FMT_D
),
10491 OPC_MOV_D
= FOP(6, FMT_D
),
10492 OPC_NEG_D
= FOP(7, FMT_D
),
10493 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10494 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10495 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10496 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10497 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10498 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10499 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10500 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10501 OPC_SEL_D
= FOP(16, FMT_D
),
10502 OPC_MOVCF_D
= FOP(17, FMT_D
),
10503 OPC_MOVZ_D
= FOP(18, FMT_D
),
10504 OPC_MOVN_D
= FOP(19, FMT_D
),
10505 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10506 OPC_RECIP_D
= FOP(21, FMT_D
),
10507 OPC_RSQRT_D
= FOP(22, FMT_D
),
10508 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10509 OPC_MADDF_D
= FOP(24, FMT_D
),
10510 OPC_MSUBF_D
= FOP(25, FMT_D
),
10511 OPC_RINT_D
= FOP(26, FMT_D
),
10512 OPC_CLASS_D
= FOP(27, FMT_D
),
10513 OPC_MIN_D
= FOP(28, FMT_D
),
10514 OPC_RECIP2_D
= FOP(28, FMT_D
),
10515 OPC_MINA_D
= FOP(29, FMT_D
),
10516 OPC_RECIP1_D
= FOP(29, FMT_D
),
10517 OPC_MAX_D
= FOP(30, FMT_D
),
10518 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10519 OPC_MAXA_D
= FOP(31, FMT_D
),
10520 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10521 OPC_CVT_S_D
= FOP(32, FMT_D
),
10522 OPC_CVT_W_D
= FOP(36, FMT_D
),
10523 OPC_CVT_L_D
= FOP(37, FMT_D
),
10524 OPC_CMP_F_D
= FOP (48, FMT_D
),
10525 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10526 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10527 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10528 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10529 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10530 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10531 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10532 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10533 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10534 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10535 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10536 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10537 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10538 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10539 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10541 OPC_CVT_S_W
= FOP(32, FMT_W
),
10542 OPC_CVT_D_W
= FOP(33, FMT_W
),
10543 OPC_CVT_S_L
= FOP(32, FMT_L
),
10544 OPC_CVT_D_L
= FOP(33, FMT_L
),
10545 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10547 OPC_ADD_PS
= FOP(0, FMT_PS
),
10548 OPC_SUB_PS
= FOP(1, FMT_PS
),
10549 OPC_MUL_PS
= FOP(2, FMT_PS
),
10550 OPC_DIV_PS
= FOP(3, FMT_PS
),
10551 OPC_ABS_PS
= FOP(5, FMT_PS
),
10552 OPC_MOV_PS
= FOP(6, FMT_PS
),
10553 OPC_NEG_PS
= FOP(7, FMT_PS
),
10554 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10555 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10556 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10557 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10558 OPC_MULR_PS
= FOP(26, FMT_PS
),
10559 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10560 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10561 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10562 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10564 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10565 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10566 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10567 OPC_PLL_PS
= FOP(44, FMT_PS
),
10568 OPC_PLU_PS
= FOP(45, FMT_PS
),
10569 OPC_PUL_PS
= FOP(46, FMT_PS
),
10570 OPC_PUU_PS
= FOP(47, FMT_PS
),
10571 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10572 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10573 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10574 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10575 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10576 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10577 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10578 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10579 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10580 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10581 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10582 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10583 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10584 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10585 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10586 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
    R6_OPC_CMP_AF_S   = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S   = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S   = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S  = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S   = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S  = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S   = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S  = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S  = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S  = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S  = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S  = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S  = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S   = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S  = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S   = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S  = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S  = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D   = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D   = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D   = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D  = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D   = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D  = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D   = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D  = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D  = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D  = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D  = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D  = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D  = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D   = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D  = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D   = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D  = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D  = FOP(27, FMT_L),
};
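/*
 * gen_cp1: data moves between the integer GPRs and the FPU.  It covers
 * MFC1/MTC1 (32-bit), DMFC1/DMTC1 (64-bit, TARGET_MIPS64 only), MFHC1/MTHC1
 * (upper half of a 64-bit FPR) and CFC1/CTC1 (the FP control registers).
 * CTC1 can change the rounding mode and the FP-related hflags, so the
 * translation block is ended with DISAS_STOP after it.
 */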
static void gen_cp1(DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();
10643 TCGv_i32 fp0
= tcg_temp_new_i32();
10645 gen_load_fpr32(ctx
, fp0
, fs
);
10646 tcg_gen_ext_i32_tl(t0
, fp0
);
10647 tcg_temp_free_i32(fp0
);
10649 gen_store_gpr(t0
, rt
);
10652 gen_load_gpr(t0
, rt
);
10654 TCGv_i32 fp0
= tcg_temp_new_i32();
10656 tcg_gen_trunc_tl_i32(fp0
, t0
);
10657 gen_store_fpr32(ctx
, fp0
, fs
);
10658 tcg_temp_free_i32(fp0
);
10662 gen_helper_1e0i(cfc1
, t0
, fs
);
10663 gen_store_gpr(t0
, rt
);
10666 gen_load_gpr(t0
, rt
);
10667 save_cpu_state(ctx
, 0);
10669 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10671 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10672 tcg_temp_free_i32(fs_tmp
);
10674 /* Stop translation as we may have changed hflags */
10675 ctx
->base
.is_jmp
= DISAS_STOP
;
10677 #if defined(TARGET_MIPS64)
10679 gen_load_fpr64(ctx
, t0
, fs
);
10680 gen_store_gpr(t0
, rt
);
10683 gen_load_gpr(t0
, rt
);
10684 gen_store_fpr64(ctx
, t0
, fs
);
10689 TCGv_i32 fp0
= tcg_temp_new_i32();
10691 gen_load_fpr32h(ctx
, fp0
, fs
);
10692 tcg_gen_ext_i32_tl(t0
, fp0
);
10693 tcg_temp_free_i32(fp0
);
10695 gen_store_gpr(t0
, rt
);
10698 gen_load_gpr(t0
, rt
);
10700 TCGv_i32 fp0
= tcg_temp_new_i32();
10702 tcg_gen_trunc_tl_i32(fp0
, t0
);
10703 gen_store_fpr32h(ctx
, fp0
, fs
);
10704 tcg_temp_free_i32(fp0
);
10708 MIPS_INVAL("cp1 move");
10709 generate_exception_end(ctx
, EXCP_RI
);
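/*
 * gen_movci: MOVF/MOVT, the integer conditional moves on an FP condition
 * code.  The selected cc bit of FCSR (fpu_fcr31) is tested with a
 * branch-over: if the condition does not match tf the move is skipped,
 * otherwise GPR rs (or 0 for r0) is copied into GPR rd.
 */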
static void gen_movci(DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    TCGLabel *l1;
    TCGCond cond;
    TCGv_i32 t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    if (rs == 0) {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    }
    gen_set_label(l1);
}
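/*
 * gen_movcf_s/_d/_ps implement MOVF.fmt/MOVT.fmt, the FP-register
 * conditional moves.  They use the same branch-over pattern as gen_movci:
 * the cc bit (two adjacent bits for the paired-single form, one per half)
 * is tested in fpu_fcr31 and fs is copied to fd only when the condition
 * matches tf.
 */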
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);
    tcg_temp_free_i32(t0);
}
10766 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10769 TCGv_i32 t0
= tcg_temp_new_i32();
10771 TCGLabel
*l1
= gen_new_label();
10774 cond
= TCG_COND_EQ
;
10776 cond
= TCG_COND_NE
;
10778 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10779 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10780 tcg_temp_free_i32(t0
);
10781 fp0
= tcg_temp_new_i64();
10782 gen_load_fpr64(ctx
, fp0
, fs
);
10783 gen_store_fpr64(ctx
, fp0
, fd
);
10784 tcg_temp_free_i64(fp0
);
10788 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10792 TCGv_i32 t0
= tcg_temp_new_i32();
10793 TCGLabel
*l1
= gen_new_label();
10794 TCGLabel
*l2
= gen_new_label();
10797 cond
= TCG_COND_EQ
;
10799 cond
= TCG_COND_NE
;
10801 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10802 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10803 gen_load_fpr32(ctx
, t0
, fs
);
10804 gen_store_fpr32(ctx
, t0
, fd
);
10807 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10808 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10809 gen_load_fpr32h(ctx
, t0
, fs
);
10810 gen_store_fpr32h(ctx
, t0
, fd
);
10811 tcg_temp_free_i32(t0
);
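/*
 * gen_sel_s/gen_sel_d implement the R6 SEL.fmt, SELEQZ.fmt and SELNEZ.fmt
 * selects.  Instead of branching they use tcg_gen_movcond on bit 0 of the
 * condition operand: SEL tests fd, SELEQZ/SELNEZ test ft, and the chosen
 * source value (or zero) is written back to fd.
 */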
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_S:
10828 tcg_gen_andi_i32(fp0
, fp0
, 1);
10829 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10832 tcg_gen_andi_i32(fp1
, fp1
, 1);
10833 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10836 tcg_gen_andi_i32(fp1
, fp1
, 1);
10837 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10840 MIPS_INVAL("gen_sel_s");
10841 generate_exception_end(ctx
, EXCP_RI
);
10845 gen_store_fpr32(ctx
, fp0
, fd
);
10846 tcg_temp_free_i32(fp2
);
10847 tcg_temp_free_i32(fp1
);
10848 tcg_temp_free_i32(fp0
);
10849 tcg_temp_free_i32(t1
);
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_D:
10865 tcg_gen_andi_i64(fp0
, fp0
, 1);
10866 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10869 tcg_gen_andi_i64(fp1
, fp1
, 1);
10870 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10873 tcg_gen_andi_i64(fp1
, fp1
, 1);
10874 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10877 MIPS_INVAL("gen_sel_d");
10878 generate_exception_end(ctx
, EXCP_RI
);
10882 gen_store_fpr64(ctx
, fp0
, fd
);
10883 tcg_temp_free_i64(fp2
);
10884 tcg_temp_free_i64(fp1
);
10885 tcg_temp_free_i64(fp0
);
10886 tcg_temp_free_i64(t1
);
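/*
 * gen_farith: the main dispatcher for COP1 arithmetic.  op1 is the
 * fmt+function encoding from the fopcode enum above; each case loads the
 * source FPRs into TCG temporaries, calls the matching float_* helper
 * (which goes through softfloat and can raise FP exceptions), and stores
 * the result back.  Some function codes mean different things before and
 * after R6 (e.g. MIN.fmt vs RECIP2.fmt), so those cases check
 * ctx->insn_flags to pick the right operation.
 */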
static void gen_farith(DisasContext *ctx, enum fopcode op1,
                       int ft, int fs, int fd, int cc)
{
    uint32_t func = ctx->opcode & 0x3f;

    switch (op1) {
    case OPC_ADD_S:
        {
10896 TCGv_i32 fp0
= tcg_temp_new_i32();
10897 TCGv_i32 fp1
= tcg_temp_new_i32();
10899 gen_load_fpr32(ctx
, fp0
, fs
);
10900 gen_load_fpr32(ctx
, fp1
, ft
);
10901 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10902 tcg_temp_free_i32(fp1
);
10903 gen_store_fpr32(ctx
, fp0
, fd
);
10904 tcg_temp_free_i32(fp0
);
10909 TCGv_i32 fp0
= tcg_temp_new_i32();
10910 TCGv_i32 fp1
= tcg_temp_new_i32();
10912 gen_load_fpr32(ctx
, fp0
, fs
);
10913 gen_load_fpr32(ctx
, fp1
, ft
);
10914 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10915 tcg_temp_free_i32(fp1
);
10916 gen_store_fpr32(ctx
, fp0
, fd
);
10917 tcg_temp_free_i32(fp0
);
10922 TCGv_i32 fp0
= tcg_temp_new_i32();
10923 TCGv_i32 fp1
= tcg_temp_new_i32();
10925 gen_load_fpr32(ctx
, fp0
, fs
);
10926 gen_load_fpr32(ctx
, fp1
, ft
);
10927 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10928 tcg_temp_free_i32(fp1
);
10929 gen_store_fpr32(ctx
, fp0
, fd
);
10930 tcg_temp_free_i32(fp0
);
10935 TCGv_i32 fp0
= tcg_temp_new_i32();
10936 TCGv_i32 fp1
= tcg_temp_new_i32();
10938 gen_load_fpr32(ctx
, fp0
, fs
);
10939 gen_load_fpr32(ctx
, fp1
, ft
);
10940 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10941 tcg_temp_free_i32(fp1
);
10942 gen_store_fpr32(ctx
, fp0
, fd
);
10943 tcg_temp_free_i32(fp0
);
10948 TCGv_i32 fp0
= tcg_temp_new_i32();
10950 gen_load_fpr32(ctx
, fp0
, fs
);
10951 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10952 gen_store_fpr32(ctx
, fp0
, fd
);
10953 tcg_temp_free_i32(fp0
);
10958 TCGv_i32 fp0
= tcg_temp_new_i32();
10960 gen_load_fpr32(ctx
, fp0
, fs
);
10961 if (ctx
->abs2008
) {
10962 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10964 gen_helper_float_abs_s(fp0
, fp0
);
10966 gen_store_fpr32(ctx
, fp0
, fd
);
10967 tcg_temp_free_i32(fp0
);
10972 TCGv_i32 fp0
= tcg_temp_new_i32();
10974 gen_load_fpr32(ctx
, fp0
, fs
);
10975 gen_store_fpr32(ctx
, fp0
, fd
);
10976 tcg_temp_free_i32(fp0
);
10981 TCGv_i32 fp0
= tcg_temp_new_i32();
10983 gen_load_fpr32(ctx
, fp0
, fs
);
10984 if (ctx
->abs2008
) {
10985 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10987 gen_helper_float_chs_s(fp0
, fp0
);
10989 gen_store_fpr32(ctx
, fp0
, fd
);
10990 tcg_temp_free_i32(fp0
);
10993 case OPC_ROUND_L_S
:
10994 check_cp1_64bitmode(ctx
);
10996 TCGv_i32 fp32
= tcg_temp_new_i32();
10997 TCGv_i64 fp64
= tcg_temp_new_i64();
10999 gen_load_fpr32(ctx
, fp32
, fs
);
11000 if (ctx
->nan2008
) {
11001 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11003 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11005 tcg_temp_free_i32(fp32
);
11006 gen_store_fpr64(ctx
, fp64
, fd
);
11007 tcg_temp_free_i64(fp64
);
11010 case OPC_TRUNC_L_S
:
11011 check_cp1_64bitmode(ctx
);
11013 TCGv_i32 fp32
= tcg_temp_new_i32();
11014 TCGv_i64 fp64
= tcg_temp_new_i64();
11016 gen_load_fpr32(ctx
, fp32
, fs
);
11017 if (ctx
->nan2008
) {
11018 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11020 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11022 tcg_temp_free_i32(fp32
);
11023 gen_store_fpr64(ctx
, fp64
, fd
);
11024 tcg_temp_free_i64(fp64
);
11028 check_cp1_64bitmode(ctx
);
11030 TCGv_i32 fp32
= tcg_temp_new_i32();
11031 TCGv_i64 fp64
= tcg_temp_new_i64();
11033 gen_load_fpr32(ctx
, fp32
, fs
);
11034 if (ctx
->nan2008
) {
11035 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11037 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11039 tcg_temp_free_i32(fp32
);
11040 gen_store_fpr64(ctx
, fp64
, fd
);
11041 tcg_temp_free_i64(fp64
);
11044 case OPC_FLOOR_L_S
:
11045 check_cp1_64bitmode(ctx
);
11047 TCGv_i32 fp32
= tcg_temp_new_i32();
11048 TCGv_i64 fp64
= tcg_temp_new_i64();
11050 gen_load_fpr32(ctx
, fp32
, fs
);
11051 if (ctx
->nan2008
) {
11052 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11054 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11056 tcg_temp_free_i32(fp32
);
11057 gen_store_fpr64(ctx
, fp64
, fd
);
11058 tcg_temp_free_i64(fp64
);
11061 case OPC_ROUND_W_S
:
11063 TCGv_i32 fp0
= tcg_temp_new_i32();
11065 gen_load_fpr32(ctx
, fp0
, fs
);
11066 if (ctx
->nan2008
) {
11067 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11069 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11071 gen_store_fpr32(ctx
, fp0
, fd
);
11072 tcg_temp_free_i32(fp0
);
11075 case OPC_TRUNC_W_S
:
11077 TCGv_i32 fp0
= tcg_temp_new_i32();
11079 gen_load_fpr32(ctx
, fp0
, fs
);
11080 if (ctx
->nan2008
) {
11081 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11083 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11085 gen_store_fpr32(ctx
, fp0
, fd
);
11086 tcg_temp_free_i32(fp0
);
11091 TCGv_i32 fp0
= tcg_temp_new_i32();
11093 gen_load_fpr32(ctx
, fp0
, fs
);
11094 if (ctx
->nan2008
) {
11095 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11097 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11099 gen_store_fpr32(ctx
, fp0
, fd
);
11100 tcg_temp_free_i32(fp0
);
11103 case OPC_FLOOR_W_S
:
11105 TCGv_i32 fp0
= tcg_temp_new_i32();
11107 gen_load_fpr32(ctx
, fp0
, fs
);
11108 if (ctx
->nan2008
) {
11109 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11111 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11113 gen_store_fpr32(ctx
, fp0
, fd
);
11114 tcg_temp_free_i32(fp0
);
11118 check_insn(ctx
, ISA_MIPS32R6
);
11119 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11122 check_insn(ctx
, ISA_MIPS32R6
);
11123 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11126 check_insn(ctx
, ISA_MIPS32R6
);
11127 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11130 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11131 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11136 TCGLabel
*l1
= gen_new_label();
11140 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11142 fp0
= tcg_temp_new_i32();
11143 gen_load_fpr32(ctx
, fp0
, fs
);
11144 gen_store_fpr32(ctx
, fp0
, fd
);
11145 tcg_temp_free_i32(fp0
);
11150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11152 TCGLabel
*l1
= gen_new_label();
11156 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11157 fp0
= tcg_temp_new_i32();
11158 gen_load_fpr32(ctx
, fp0
, fs
);
11159 gen_store_fpr32(ctx
, fp0
, fd
);
11160 tcg_temp_free_i32(fp0
);
11167 TCGv_i32 fp0
= tcg_temp_new_i32();
11169 gen_load_fpr32(ctx
, fp0
, fs
);
11170 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11171 gen_store_fpr32(ctx
, fp0
, fd
);
11172 tcg_temp_free_i32(fp0
);
11177 TCGv_i32 fp0
= tcg_temp_new_i32();
11179 gen_load_fpr32(ctx
, fp0
, fs
);
11180 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11181 gen_store_fpr32(ctx
, fp0
, fd
);
11182 tcg_temp_free_i32(fp0
);
11186 check_insn(ctx
, ISA_MIPS32R6
);
11188 TCGv_i32 fp0
= tcg_temp_new_i32();
11189 TCGv_i32 fp1
= tcg_temp_new_i32();
11190 TCGv_i32 fp2
= tcg_temp_new_i32();
11191 gen_load_fpr32(ctx
, fp0
, fs
);
11192 gen_load_fpr32(ctx
, fp1
, ft
);
11193 gen_load_fpr32(ctx
, fp2
, fd
);
11194 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11195 gen_store_fpr32(ctx
, fp2
, fd
);
11196 tcg_temp_free_i32(fp2
);
11197 tcg_temp_free_i32(fp1
);
11198 tcg_temp_free_i32(fp0
);
11202 check_insn(ctx
, ISA_MIPS32R6
);
11204 TCGv_i32 fp0
= tcg_temp_new_i32();
11205 TCGv_i32 fp1
= tcg_temp_new_i32();
11206 TCGv_i32 fp2
= tcg_temp_new_i32();
11207 gen_load_fpr32(ctx
, fp0
, fs
);
11208 gen_load_fpr32(ctx
, fp1
, ft
);
11209 gen_load_fpr32(ctx
, fp2
, fd
);
11210 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11211 gen_store_fpr32(ctx
, fp2
, fd
);
11212 tcg_temp_free_i32(fp2
);
11213 tcg_temp_free_i32(fp1
);
11214 tcg_temp_free_i32(fp0
);
11218 check_insn(ctx
, ISA_MIPS32R6
);
11220 TCGv_i32 fp0
= tcg_temp_new_i32();
11221 gen_load_fpr32(ctx
, fp0
, fs
);
11222 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11223 gen_store_fpr32(ctx
, fp0
, fd
);
11224 tcg_temp_free_i32(fp0
);
11228 check_insn(ctx
, ISA_MIPS32R6
);
11230 TCGv_i32 fp0
= tcg_temp_new_i32();
11231 gen_load_fpr32(ctx
, fp0
, fs
);
11232 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11233 gen_store_fpr32(ctx
, fp0
, fd
);
11234 tcg_temp_free_i32(fp0
);
11237 case OPC_MIN_S
: /* OPC_RECIP2_S */
11238 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11240 TCGv_i32 fp0
= tcg_temp_new_i32();
11241 TCGv_i32 fp1
= tcg_temp_new_i32();
11242 TCGv_i32 fp2
= tcg_temp_new_i32();
11243 gen_load_fpr32(ctx
, fp0
, fs
);
11244 gen_load_fpr32(ctx
, fp1
, ft
);
11245 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11246 gen_store_fpr32(ctx
, fp2
, fd
);
11247 tcg_temp_free_i32(fp2
);
11248 tcg_temp_free_i32(fp1
);
11249 tcg_temp_free_i32(fp0
);
11252 check_cp1_64bitmode(ctx
);
11254 TCGv_i32 fp0
= tcg_temp_new_i32();
11255 TCGv_i32 fp1
= tcg_temp_new_i32();
11257 gen_load_fpr32(ctx
, fp0
, fs
);
11258 gen_load_fpr32(ctx
, fp1
, ft
);
11259 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11260 tcg_temp_free_i32(fp1
);
11261 gen_store_fpr32(ctx
, fp0
, fd
);
11262 tcg_temp_free_i32(fp0
);
11266 case OPC_MINA_S
: /* OPC_RECIP1_S */
11267 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11269 TCGv_i32 fp0
= tcg_temp_new_i32();
11270 TCGv_i32 fp1
= tcg_temp_new_i32();
11271 TCGv_i32 fp2
= tcg_temp_new_i32();
11272 gen_load_fpr32(ctx
, fp0
, fs
);
11273 gen_load_fpr32(ctx
, fp1
, ft
);
11274 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11275 gen_store_fpr32(ctx
, fp2
, fd
);
11276 tcg_temp_free_i32(fp2
);
11277 tcg_temp_free_i32(fp1
);
11278 tcg_temp_free_i32(fp0
);
11281 check_cp1_64bitmode(ctx
);
11283 TCGv_i32 fp0
= tcg_temp_new_i32();
11285 gen_load_fpr32(ctx
, fp0
, fs
);
11286 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11287 gen_store_fpr32(ctx
, fp0
, fd
);
11288 tcg_temp_free_i32(fp0
);
11292 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11293 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11295 TCGv_i32 fp0
= tcg_temp_new_i32();
11296 TCGv_i32 fp1
= tcg_temp_new_i32();
11297 gen_load_fpr32(ctx
, fp0
, fs
);
11298 gen_load_fpr32(ctx
, fp1
, ft
);
11299 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11300 gen_store_fpr32(ctx
, fp1
, fd
);
11301 tcg_temp_free_i32(fp1
);
11302 tcg_temp_free_i32(fp0
);
11305 check_cp1_64bitmode(ctx
);
11307 TCGv_i32 fp0
= tcg_temp_new_i32();
11309 gen_load_fpr32(ctx
, fp0
, fs
);
11310 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11311 gen_store_fpr32(ctx
, fp0
, fd
);
11312 tcg_temp_free_i32(fp0
);
11316 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11317 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11319 TCGv_i32 fp0
= tcg_temp_new_i32();
11320 TCGv_i32 fp1
= tcg_temp_new_i32();
11321 gen_load_fpr32(ctx
, fp0
, fs
);
11322 gen_load_fpr32(ctx
, fp1
, ft
);
11323 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11324 gen_store_fpr32(ctx
, fp1
, fd
);
11325 tcg_temp_free_i32(fp1
);
11326 tcg_temp_free_i32(fp0
);
11329 check_cp1_64bitmode(ctx
);
11331 TCGv_i32 fp0
= tcg_temp_new_i32();
11332 TCGv_i32 fp1
= tcg_temp_new_i32();
11334 gen_load_fpr32(ctx
, fp0
, fs
);
11335 gen_load_fpr32(ctx
, fp1
, ft
);
11336 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11337 tcg_temp_free_i32(fp1
);
11338 gen_store_fpr32(ctx
, fp0
, fd
);
11339 tcg_temp_free_i32(fp0
);
11344 check_cp1_registers(ctx
, fd
);
11346 TCGv_i32 fp32
= tcg_temp_new_i32();
11347 TCGv_i64 fp64
= tcg_temp_new_i64();
11349 gen_load_fpr32(ctx
, fp32
, fs
);
11350 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11351 tcg_temp_free_i32(fp32
);
11352 gen_store_fpr64(ctx
, fp64
, fd
);
11353 tcg_temp_free_i64(fp64
);
11358 TCGv_i32 fp0
= tcg_temp_new_i32();
11360 gen_load_fpr32(ctx
, fp0
, fs
);
11361 if (ctx
->nan2008
) {
11362 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11364 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11366 gen_store_fpr32(ctx
, fp0
, fd
);
11367 tcg_temp_free_i32(fp0
);
11371 check_cp1_64bitmode(ctx
);
11373 TCGv_i32 fp32
= tcg_temp_new_i32();
11374 TCGv_i64 fp64
= tcg_temp_new_i64();
11376 gen_load_fpr32(ctx
, fp32
, fs
);
11377 if (ctx
->nan2008
) {
11378 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11380 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11382 tcg_temp_free_i32(fp32
);
11383 gen_store_fpr64(ctx
, fp64
, fd
);
11384 tcg_temp_free_i64(fp64
);
11390 TCGv_i64 fp64
= tcg_temp_new_i64();
11391 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11392 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11394 gen_load_fpr32(ctx
, fp32_0
, fs
);
11395 gen_load_fpr32(ctx
, fp32_1
, ft
);
11396 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11397 tcg_temp_free_i32(fp32_1
);
11398 tcg_temp_free_i32(fp32_0
);
11399 gen_store_fpr64(ctx
, fp64
, fd
);
11400 tcg_temp_free_i64(fp64
);
11406 case OPC_CMP_UEQ_S
:
11407 case OPC_CMP_OLT_S
:
11408 case OPC_CMP_ULT_S
:
11409 case OPC_CMP_OLE_S
:
11410 case OPC_CMP_ULE_S
:
11412 case OPC_CMP_NGLE_S
:
11413 case OPC_CMP_SEQ_S
:
11414 case OPC_CMP_NGL_S
:
11416 case OPC_CMP_NGE_S
:
11418 case OPC_CMP_NGT_S
:
11419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11420 if (ctx
->opcode
& (1 << 6)) {
11421 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11423 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11427 check_cp1_registers(ctx
, fs
| ft
| fd
);
11429 TCGv_i64 fp0
= tcg_temp_new_i64();
11430 TCGv_i64 fp1
= tcg_temp_new_i64();
11432 gen_load_fpr64(ctx
, fp0
, fs
);
11433 gen_load_fpr64(ctx
, fp1
, ft
);
11434 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11435 tcg_temp_free_i64(fp1
);
11436 gen_store_fpr64(ctx
, fp0
, fd
);
11437 tcg_temp_free_i64(fp0
);
11441 check_cp1_registers(ctx
, fs
| ft
| fd
);
11443 TCGv_i64 fp0
= tcg_temp_new_i64();
11444 TCGv_i64 fp1
= tcg_temp_new_i64();
11446 gen_load_fpr64(ctx
, fp0
, fs
);
11447 gen_load_fpr64(ctx
, fp1
, ft
);
11448 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11449 tcg_temp_free_i64(fp1
);
11450 gen_store_fpr64(ctx
, fp0
, fd
);
11451 tcg_temp_free_i64(fp0
);
11455 check_cp1_registers(ctx
, fs
| ft
| fd
);
11457 TCGv_i64 fp0
= tcg_temp_new_i64();
11458 TCGv_i64 fp1
= tcg_temp_new_i64();
11460 gen_load_fpr64(ctx
, fp0
, fs
);
11461 gen_load_fpr64(ctx
, fp1
, ft
);
11462 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11463 tcg_temp_free_i64(fp1
);
11464 gen_store_fpr64(ctx
, fp0
, fd
);
11465 tcg_temp_free_i64(fp0
);
11469 check_cp1_registers(ctx
, fs
| ft
| fd
);
11471 TCGv_i64 fp0
= tcg_temp_new_i64();
11472 TCGv_i64 fp1
= tcg_temp_new_i64();
11474 gen_load_fpr64(ctx
, fp0
, fs
);
11475 gen_load_fpr64(ctx
, fp1
, ft
);
11476 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11477 tcg_temp_free_i64(fp1
);
11478 gen_store_fpr64(ctx
, fp0
, fd
);
11479 tcg_temp_free_i64(fp0
);
11483 check_cp1_registers(ctx
, fs
| fd
);
11485 TCGv_i64 fp0
= tcg_temp_new_i64();
11487 gen_load_fpr64(ctx
, fp0
, fs
);
11488 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11489 gen_store_fpr64(ctx
, fp0
, fd
);
11490 tcg_temp_free_i64(fp0
);
11494 check_cp1_registers(ctx
, fs
| fd
);
11496 TCGv_i64 fp0
= tcg_temp_new_i64();
11498 gen_load_fpr64(ctx
, fp0
, fs
);
11499 if (ctx
->abs2008
) {
11500 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11502 gen_helper_float_abs_d(fp0
, fp0
);
11504 gen_store_fpr64(ctx
, fp0
, fd
);
11505 tcg_temp_free_i64(fp0
);
11509 check_cp1_registers(ctx
, fs
| fd
);
11511 TCGv_i64 fp0
= tcg_temp_new_i64();
11513 gen_load_fpr64(ctx
, fp0
, fs
);
11514 gen_store_fpr64(ctx
, fp0
, fd
);
11515 tcg_temp_free_i64(fp0
);
11519 check_cp1_registers(ctx
, fs
| fd
);
11521 TCGv_i64 fp0
= tcg_temp_new_i64();
11523 gen_load_fpr64(ctx
, fp0
, fs
);
11524 if (ctx
->abs2008
) {
11525 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11527 gen_helper_float_chs_d(fp0
, fp0
);
11529 gen_store_fpr64(ctx
, fp0
, fd
);
11530 tcg_temp_free_i64(fp0
);
11533 case OPC_ROUND_L_D
:
11534 check_cp1_64bitmode(ctx
);
11536 TCGv_i64 fp0
= tcg_temp_new_i64();
11538 gen_load_fpr64(ctx
, fp0
, fs
);
11539 if (ctx
->nan2008
) {
11540 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11542 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11544 gen_store_fpr64(ctx
, fp0
, fd
);
11545 tcg_temp_free_i64(fp0
);
11548 case OPC_TRUNC_L_D
:
11549 check_cp1_64bitmode(ctx
);
11551 TCGv_i64 fp0
= tcg_temp_new_i64();
11553 gen_load_fpr64(ctx
, fp0
, fs
);
11554 if (ctx
->nan2008
) {
11555 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11557 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11559 gen_store_fpr64(ctx
, fp0
, fd
);
11560 tcg_temp_free_i64(fp0
);
11564 check_cp1_64bitmode(ctx
);
11566 TCGv_i64 fp0
= tcg_temp_new_i64();
11568 gen_load_fpr64(ctx
, fp0
, fs
);
11569 if (ctx
->nan2008
) {
11570 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11572 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11574 gen_store_fpr64(ctx
, fp0
, fd
);
11575 tcg_temp_free_i64(fp0
);
11578 case OPC_FLOOR_L_D
:
11579 check_cp1_64bitmode(ctx
);
11581 TCGv_i64 fp0
= tcg_temp_new_i64();
11583 gen_load_fpr64(ctx
, fp0
, fs
);
11584 if (ctx
->nan2008
) {
11585 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11587 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11589 gen_store_fpr64(ctx
, fp0
, fd
);
11590 tcg_temp_free_i64(fp0
);
11593 case OPC_ROUND_W_D:
11594 check_cp1_registers(ctx, fs);
11596 TCGv_i32 fp32 = tcg_temp_new_i32();
11597 TCGv_i64 fp64 = tcg_temp_new_i64();
11599 gen_load_fpr64(ctx, fp64, fs);
11600 if (ctx->nan2008) {
11601 gen_helper_float_round_2008_w_d(fp32, cpu_env, fp64);
11603 gen_helper_float_round_w_d(fp32, cpu_env, fp64);
11605 tcg_temp_free_i64(fp64);
11606 gen_store_fpr32(ctx, fp32, fd);
11607 tcg_temp_free_i32(fp32);
11610 case OPC_TRUNC_W_D:
11611 check_cp1_registers(ctx, fs);
11613 TCGv_i32 fp32 = tcg_temp_new_i32();
11614 TCGv_i64 fp64 = tcg_temp_new_i64();
11616 gen_load_fpr64(ctx, fp64, fs);
11617 if (ctx->nan2008) {
11618 gen_helper_float_trunc_2008_w_d(fp32, cpu_env, fp64);
11620 gen_helper_float_trunc_w_d(fp32, cpu_env, fp64);
11622 tcg_temp_free_i64(fp64);
11623 gen_store_fpr32(ctx, fp32, fd);
11624 tcg_temp_free_i32(fp32);
11628 check_cp1_registers(ctx, fs);
11630 TCGv_i32 fp32 = tcg_temp_new_i32();
11631 TCGv_i64 fp64 = tcg_temp_new_i64();
11633 gen_load_fpr64(ctx, fp64, fs);
11634 if (ctx->nan2008) {
11635 gen_helper_float_ceil_2008_w_d(fp32, cpu_env, fp64);
11637 gen_helper_float_ceil_w_d(fp32, cpu_env, fp64);
11639 tcg_temp_free_i64(fp64);
11640 gen_store_fpr32(ctx, fp32, fd);
11641 tcg_temp_free_i32(fp32);
11644 case OPC_FLOOR_W_D:
11645 check_cp1_registers(ctx, fs);
11647 TCGv_i32 fp32 = tcg_temp_new_i32();
11648 TCGv_i64 fp64 = tcg_temp_new_i64();
11650 gen_load_fpr64(ctx, fp64, fs);
11651 if (ctx->nan2008) {
11652 gen_helper_float_floor_2008_w_d(fp32, cpu_env, fp64);
11654 gen_helper_float_floor_w_d(fp32, cpu_env, fp64);
11656 tcg_temp_free_i64(fp64);
11657 gen_store_fpr32(ctx, fp32, fd);
11658 tcg_temp_free_i32(fp32);
11662 check_insn(ctx, ISA_MIPS32R6);
11663 gen_sel_d(ctx, op1, fd, ft, fs);
11666 check_insn(ctx, ISA_MIPS32R6);
11667 gen_sel_d(ctx, op1, fd, ft, fs);
11670 check_insn(ctx, ISA_MIPS32R6);
11671 gen_sel_d(ctx, op1, fd, ft, fs);
11674 check_insn_opc_removed(ctx, ISA_MIPS32R6);
11675 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
11678 check_insn_opc_removed(ctx, ISA_MIPS32R6);
11680 TCGLabel *l1 = gen_new_label();
11684 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
11686 fp0 = tcg_temp_new_i64();
11687 gen_load_fpr64(ctx, fp0, fs);
11688 gen_store_fpr64(ctx, fp0, fd);
11689 tcg_temp_free_i64(fp0);
11694 check_insn_opc_removed(ctx, ISA_MIPS32R6);
11696 TCGLabel *l1 = gen_new_label();
11700 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
11701 fp0 = tcg_temp_new_i64();
11702 gen_load_fpr64(ctx, fp0, fs);
11703 gen_store_fpr64(ctx, fp0, fd);
11704 tcg_temp_free_i64(fp0);
11710 check_cp1_registers(ctx, fs | fd);
11712 TCGv_i64 fp0 = tcg_temp_new_i64();
11714 gen_load_fpr64(ctx, fp0, fs);
11715 gen_helper_float_recip_d(fp0, cpu_env, fp0);
11716 gen_store_fpr64(ctx, fp0, fd);
11717 tcg_temp_free_i64(fp0);
11721 check_cp1_registers(ctx, fs | fd);
11723 TCGv_i64 fp0 = tcg_temp_new_i64();
11725 gen_load_fpr64(ctx, fp0, fs);
11726 gen_helper_float_rsqrt_d(fp0, cpu_env, fp0);
11727 gen_store_fpr64(ctx, fp0, fd);
11728 tcg_temp_free_i64(fp0);
11732 check_insn(ctx, ISA_MIPS32R6);
11734 TCGv_i64 fp0 = tcg_temp_new_i64();
11735 TCGv_i64 fp1 = tcg_temp_new_i64();
11736 TCGv_i64 fp2 = tcg_temp_new_i64();
11737 gen_load_fpr64(ctx, fp0, fs);
11738 gen_load_fpr64(ctx, fp1, ft);
11739 gen_load_fpr64(ctx, fp2, fd);
11740 gen_helper_float_maddf_d(fp2, cpu_env, fp0, fp1, fp2);
11741 gen_store_fpr64(ctx, fp2, fd);
11742 tcg_temp_free_i64(fp2);
11743 tcg_temp_free_i64(fp1);
11744 tcg_temp_free_i64(fp0);
11748 check_insn(ctx, ISA_MIPS32R6);
11750 TCGv_i64 fp0 = tcg_temp_new_i64();
11751 TCGv_i64 fp1 = tcg_temp_new_i64();
11752 TCGv_i64 fp2 = tcg_temp_new_i64();
11753 gen_load_fpr64(ctx, fp0, fs);
11754 gen_load_fpr64(ctx, fp1, ft);
11755 gen_load_fpr64(ctx, fp2, fd);
11756 gen_helper_float_msubf_d(fp2, cpu_env, fp0, fp1, fp2);
11757 gen_store_fpr64(ctx, fp2, fd);
11758 tcg_temp_free_i64(fp2);
11759 tcg_temp_free_i64(fp1);
11760 tcg_temp_free_i64(fp0);
11764 check_insn(ctx, ISA_MIPS32R6);
11766 TCGv_i64 fp0 = tcg_temp_new_i64();
11767 gen_load_fpr64(ctx, fp0, fs);
11768 gen_helper_float_rint_d(fp0, cpu_env, fp0);
11769 gen_store_fpr64(ctx, fp0, fd);
11770 tcg_temp_free_i64(fp0);
11774 check_insn(ctx, ISA_MIPS32R6);
11776 TCGv_i64 fp0 = tcg_temp_new_i64();
11777 gen_load_fpr64(ctx, fp0, fs);
11778 gen_helper_float_class_d(fp0, cpu_env, fp0);
11779 gen_store_fpr64(ctx, fp0, fd);
11780 tcg_temp_free_i64(fp0);
11783 case OPC_MIN_D
: /* OPC_RECIP2_D */
11784 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11786 TCGv_i64 fp0
= tcg_temp_new_i64();
11787 TCGv_i64 fp1
= tcg_temp_new_i64();
11788 gen_load_fpr64(ctx
, fp0
, fs
);
11789 gen_load_fpr64(ctx
, fp1
, ft
);
11790 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11791 gen_store_fpr64(ctx
, fp1
, fd
);
11792 tcg_temp_free_i64(fp1
);
11793 tcg_temp_free_i64(fp0
);
11796 check_cp1_64bitmode(ctx
);
11798 TCGv_i64 fp0
= tcg_temp_new_i64();
11799 TCGv_i64 fp1
= tcg_temp_new_i64();
11801 gen_load_fpr64(ctx
, fp0
, fs
);
11802 gen_load_fpr64(ctx
, fp1
, ft
);
11803 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11804 tcg_temp_free_i64(fp1
);
11805 gen_store_fpr64(ctx
, fp0
, fd
);
11806 tcg_temp_free_i64(fp0
);
11810 case OPC_MINA_D
: /* OPC_RECIP1_D */
11811 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11813 TCGv_i64 fp0
= tcg_temp_new_i64();
11814 TCGv_i64 fp1
= tcg_temp_new_i64();
11815 gen_load_fpr64(ctx
, fp0
, fs
);
11816 gen_load_fpr64(ctx
, fp1
, ft
);
11817 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11818 gen_store_fpr64(ctx
, fp1
, fd
);
11819 tcg_temp_free_i64(fp1
);
11820 tcg_temp_free_i64(fp0
);
11823 check_cp1_64bitmode(ctx
);
11825 TCGv_i64 fp0
= tcg_temp_new_i64();
11827 gen_load_fpr64(ctx
, fp0
, fs
);
11828 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11829 gen_store_fpr64(ctx
, fp0
, fd
);
11830 tcg_temp_free_i64(fp0
);
11834 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11835 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11837 TCGv_i64 fp0
= tcg_temp_new_i64();
11838 TCGv_i64 fp1
= tcg_temp_new_i64();
11839 gen_load_fpr64(ctx
, fp0
, fs
);
11840 gen_load_fpr64(ctx
, fp1
, ft
);
11841 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11842 gen_store_fpr64(ctx
, fp1
, fd
);
11843 tcg_temp_free_i64(fp1
);
11844 tcg_temp_free_i64(fp0
);
11847 check_cp1_64bitmode(ctx
);
11849 TCGv_i64 fp0
= tcg_temp_new_i64();
11851 gen_load_fpr64(ctx
, fp0
, fs
);
11852 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11853 gen_store_fpr64(ctx
, fp0
, fd
);
11854 tcg_temp_free_i64(fp0
);
11858 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11859 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11861 TCGv_i64 fp0
= tcg_temp_new_i64();
11862 TCGv_i64 fp1
= tcg_temp_new_i64();
11863 gen_load_fpr64(ctx
, fp0
, fs
);
11864 gen_load_fpr64(ctx
, fp1
, ft
);
11865 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11866 gen_store_fpr64(ctx
, fp1
, fd
);
11867 tcg_temp_free_i64(fp1
);
11868 tcg_temp_free_i64(fp0
);
11871 check_cp1_64bitmode(ctx
);
11873 TCGv_i64 fp0
= tcg_temp_new_i64();
11874 TCGv_i64 fp1
= tcg_temp_new_i64();
11876 gen_load_fpr64(ctx
, fp0
, fs
);
11877 gen_load_fpr64(ctx
, fp1
, ft
);
11878 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11879 tcg_temp_free_i64(fp1
);
11880 gen_store_fpr64(ctx
, fp0
, fd
);
11881 tcg_temp_free_i64(fp0
);
11888 case OPC_CMP_UEQ_D:
11889 case OPC_CMP_OLT_D:
11890 case OPC_CMP_ULT_D:
11891 case OPC_CMP_OLE_D:
11892 case OPC_CMP_ULE_D:
11894 case OPC_CMP_NGLE_D:
11895 case OPC_CMP_SEQ_D:
11896 case OPC_CMP_NGL_D:
11898 case OPC_CMP_NGE_D:
11900 case OPC_CMP_NGT_D:
11901 check_insn_opc_removed(ctx, ISA_MIPS32R6);
11902 if (ctx->opcode & (1 << 6)) {
11903 gen_cmpabs_d(ctx, func-48, ft, fs, cc);
11905 gen_cmp_d(ctx, func-48, ft, fs, cc);
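/*
 * Added commentary (not in the original source): the C.cond.D opcodes
 * occupy FPU function codes 0x30..0x3f, so "func-48" maps them onto the
 * sixteen condition indices 0..15 (F, UN, EQ, UEQ, OLT, ULT, OLE, ULE,
 * SF, NGLE, SEQ, NGL, LT, NGE, LE, NGT) expected by gen_cmp_d(), while
 * bit 6 of the opcode selects the MIPS-3D absolute-value compare handled
 * by gen_cmpabs_d().
 */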
11909 check_cp1_registers(ctx, fs);
11911 TCGv_i32 fp32 = tcg_temp_new_i32();
11912 TCGv_i64 fp64 = tcg_temp_new_i64();
11914 gen_load_fpr64(ctx, fp64, fs);
11915 gen_helper_float_cvts_d(fp32, cpu_env, fp64);
11916 tcg_temp_free_i64(fp64);
11917 gen_store_fpr32(ctx, fp32, fd);
11918 tcg_temp_free_i32(fp32);
11922 check_cp1_registers(ctx, fs);
11924 TCGv_i32 fp32 = tcg_temp_new_i32();
11925 TCGv_i64 fp64 = tcg_temp_new_i64();
11927 gen_load_fpr64(ctx, fp64, fs);
11928 if (ctx->nan2008) {
11929 gen_helper_float_cvt_2008_w_d(fp32, cpu_env, fp64);
11931 gen_helper_float_cvt_w_d(fp32, cpu_env, fp64);
11933 tcg_temp_free_i64(fp64);
11934 gen_store_fpr32(ctx, fp32, fd);
11935 tcg_temp_free_i32(fp32);
11939 check_cp1_64bitmode(ctx);
11941 TCGv_i64 fp0 = tcg_temp_new_i64();
11943 gen_load_fpr64(ctx, fp0, fs);
11944 if (ctx->nan2008) {
11945 gen_helper_float_cvt_2008_l_d(fp0, cpu_env, fp0);
11947 gen_helper_float_cvt_l_d(fp0, cpu_env, fp0);
11949 gen_store_fpr64(ctx, fp0, fd);
11950 tcg_temp_free_i64(fp0);
11955 TCGv_i32 fp0 = tcg_temp_new_i32();
11957 gen_load_fpr32(ctx, fp0, fs);
11958 gen_helper_float_cvts_w(fp0, cpu_env, fp0);
11959 gen_store_fpr32(ctx, fp0, fd);
11960 tcg_temp_free_i32(fp0);
11964 check_cp1_registers(ctx, fd);
11966 TCGv_i32 fp32 = tcg_temp_new_i32();
11967 TCGv_i64 fp64 = tcg_temp_new_i64();
11969 gen_load_fpr32(ctx, fp32, fs);
11970 gen_helper_float_cvtd_w(fp64, cpu_env, fp32);
11971 tcg_temp_free_i32(fp32);
11972 gen_store_fpr64(ctx, fp64, fd);
11973 tcg_temp_free_i64(fp64);
11977 check_cp1_64bitmode(ctx);
11979 TCGv_i32 fp32 = tcg_temp_new_i32();
11980 TCGv_i64 fp64 = tcg_temp_new_i64();
11982 gen_load_fpr64(ctx, fp64, fs);
11983 gen_helper_float_cvts_l(fp32, cpu_env, fp64);
11984 tcg_temp_free_i64(fp64);
11985 gen_store_fpr32(ctx, fp32, fd);
11986 tcg_temp_free_i32(fp32);
11990 check_cp1_64bitmode(ctx);
11992 TCGv_i64 fp0 = tcg_temp_new_i64();
11994 gen_load_fpr64(ctx, fp0, fs);
11995 gen_helper_float_cvtd_l(fp0, cpu_env, fp0);
11996 gen_store_fpr64(ctx, fp0, fd);
11997 tcg_temp_free_i64(fp0);
12000 case OPC_CVT_PS_PW
:
12003 TCGv_i64 fp0
= tcg_temp_new_i64();
12005 gen_load_fpr64(ctx
, fp0
, fs
);
12006 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12007 gen_store_fpr64(ctx
, fp0
, fd
);
12008 tcg_temp_free_i64(fp0
);
12014 TCGv_i64 fp0
= tcg_temp_new_i64();
12015 TCGv_i64 fp1
= tcg_temp_new_i64();
12017 gen_load_fpr64(ctx
, fp0
, fs
);
12018 gen_load_fpr64(ctx
, fp1
, ft
);
12019 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12020 tcg_temp_free_i64(fp1
);
12021 gen_store_fpr64(ctx
, fp0
, fd
);
12022 tcg_temp_free_i64(fp0
);
12028 TCGv_i64 fp0
= tcg_temp_new_i64();
12029 TCGv_i64 fp1
= tcg_temp_new_i64();
12031 gen_load_fpr64(ctx
, fp0
, fs
);
12032 gen_load_fpr64(ctx
, fp1
, ft
);
12033 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12034 tcg_temp_free_i64(fp1
);
12035 gen_store_fpr64(ctx
, fp0
, fd
);
12036 tcg_temp_free_i64(fp0
);
12042 TCGv_i64 fp0
= tcg_temp_new_i64();
12043 TCGv_i64 fp1
= tcg_temp_new_i64();
12045 gen_load_fpr64(ctx
, fp0
, fs
);
12046 gen_load_fpr64(ctx
, fp1
, ft
);
12047 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12048 tcg_temp_free_i64(fp1
);
12049 gen_store_fpr64(ctx
, fp0
, fd
);
12050 tcg_temp_free_i64(fp0
);
12056 TCGv_i64 fp0
= tcg_temp_new_i64();
12058 gen_load_fpr64(ctx
, fp0
, fs
);
12059 gen_helper_float_abs_ps(fp0
, fp0
);
12060 gen_store_fpr64(ctx
, fp0
, fd
);
12061 tcg_temp_free_i64(fp0
);
12067 TCGv_i64 fp0
= tcg_temp_new_i64();
12069 gen_load_fpr64(ctx
, fp0
, fs
);
12070 gen_store_fpr64(ctx
, fp0
, fd
);
12071 tcg_temp_free_i64(fp0
);
12077 TCGv_i64 fp0
= tcg_temp_new_i64();
12079 gen_load_fpr64(ctx
, fp0
, fs
);
12080 gen_helper_float_chs_ps(fp0
, fp0
);
12081 gen_store_fpr64(ctx
, fp0
, fd
);
12082 tcg_temp_free_i64(fp0
);
12087 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12092 TCGLabel
*l1
= gen_new_label();
12096 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12097 fp0
= tcg_temp_new_i64();
12098 gen_load_fpr64(ctx
, fp0
, fs
);
12099 gen_store_fpr64(ctx
, fp0
, fd
);
12100 tcg_temp_free_i64(fp0
);
12107 TCGLabel
*l1
= gen_new_label();
12111 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12112 fp0
= tcg_temp_new_i64();
12113 gen_load_fpr64(ctx
, fp0
, fs
);
12114 gen_store_fpr64(ctx
, fp0
, fd
);
12115 tcg_temp_free_i64(fp0
);
12123 TCGv_i64 fp0
= tcg_temp_new_i64();
12124 TCGv_i64 fp1
= tcg_temp_new_i64();
12126 gen_load_fpr64(ctx
, fp0
, ft
);
12127 gen_load_fpr64(ctx
, fp1
, fs
);
12128 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12129 tcg_temp_free_i64(fp1
);
12130 gen_store_fpr64(ctx
, fp0
, fd
);
12131 tcg_temp_free_i64(fp0
);
12137 TCGv_i64 fp0
= tcg_temp_new_i64();
12138 TCGv_i64 fp1
= tcg_temp_new_i64();
12140 gen_load_fpr64(ctx
, fp0
, ft
);
12141 gen_load_fpr64(ctx
, fp1
, fs
);
12142 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12143 tcg_temp_free_i64(fp1
);
12144 gen_store_fpr64(ctx
, fp0
, fd
);
12145 tcg_temp_free_i64(fp0
);
12148 case OPC_RECIP2_PS
:
12151 TCGv_i64 fp0
= tcg_temp_new_i64();
12152 TCGv_i64 fp1
= tcg_temp_new_i64();
12154 gen_load_fpr64(ctx
, fp0
, fs
);
12155 gen_load_fpr64(ctx
, fp1
, ft
);
12156 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12157 tcg_temp_free_i64(fp1
);
12158 gen_store_fpr64(ctx
, fp0
, fd
);
12159 tcg_temp_free_i64(fp0
);
12162 case OPC_RECIP1_PS
:
12165 TCGv_i64 fp0
= tcg_temp_new_i64();
12167 gen_load_fpr64(ctx
, fp0
, fs
);
12168 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12169 gen_store_fpr64(ctx
, fp0
, fd
);
12170 tcg_temp_free_i64(fp0
);
12173 case OPC_RSQRT1_PS
:
12176 TCGv_i64 fp0
= tcg_temp_new_i64();
12178 gen_load_fpr64(ctx
, fp0
, fs
);
12179 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12180 gen_store_fpr64(ctx
, fp0
, fd
);
12181 tcg_temp_free_i64(fp0
);
12184 case OPC_RSQRT2_PS
:
12187 TCGv_i64 fp0
= tcg_temp_new_i64();
12188 TCGv_i64 fp1
= tcg_temp_new_i64();
12190 gen_load_fpr64(ctx
, fp0
, fs
);
12191 gen_load_fpr64(ctx
, fp1
, ft
);
12192 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12193 tcg_temp_free_i64(fp1
);
12194 gen_store_fpr64(ctx
, fp0
, fd
);
12195 tcg_temp_free_i64(fp0
);
12199 check_cp1_64bitmode(ctx
);
12201 TCGv_i32 fp0
= tcg_temp_new_i32();
12203 gen_load_fpr32h(ctx
, fp0
, fs
);
12204 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12205 gen_store_fpr32(ctx
, fp0
, fd
);
12206 tcg_temp_free_i32(fp0
);
12209 case OPC_CVT_PW_PS
:
12212 TCGv_i64 fp0
= tcg_temp_new_i64();
12214 gen_load_fpr64(ctx
, fp0
, fs
);
12215 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12216 gen_store_fpr64(ctx
, fp0
, fd
);
12217 tcg_temp_free_i64(fp0
);
12221 check_cp1_64bitmode(ctx
);
12223 TCGv_i32 fp0
= tcg_temp_new_i32();
12225 gen_load_fpr32(ctx
, fp0
, fs
);
12226 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12227 gen_store_fpr32(ctx
, fp0
, fd
);
12228 tcg_temp_free_i32(fp0
);
12234 TCGv_i32 fp0
= tcg_temp_new_i32();
12235 TCGv_i32 fp1
= tcg_temp_new_i32();
12237 gen_load_fpr32(ctx
, fp0
, fs
);
12238 gen_load_fpr32(ctx
, fp1
, ft
);
12239 gen_store_fpr32h(ctx
, fp0
, fd
);
12240 gen_store_fpr32(ctx
, fp1
, fd
);
12241 tcg_temp_free_i32(fp0
);
12242 tcg_temp_free_i32(fp1
);
12248 TCGv_i32 fp0
= tcg_temp_new_i32();
12249 TCGv_i32 fp1
= tcg_temp_new_i32();
12251 gen_load_fpr32(ctx
, fp0
, fs
);
12252 gen_load_fpr32h(ctx
, fp1
, ft
);
12253 gen_store_fpr32(ctx
, fp1
, fd
);
12254 gen_store_fpr32h(ctx
, fp0
, fd
);
12255 tcg_temp_free_i32(fp0
);
12256 tcg_temp_free_i32(fp1
);
12262 TCGv_i32 fp0
= tcg_temp_new_i32();
12263 TCGv_i32 fp1
= tcg_temp_new_i32();
12265 gen_load_fpr32h(ctx
, fp0
, fs
);
12266 gen_load_fpr32(ctx
, fp1
, ft
);
12267 gen_store_fpr32(ctx
, fp1
, fd
);
12268 gen_store_fpr32h(ctx
, fp0
, fd
);
12269 tcg_temp_free_i32(fp0
);
12270 tcg_temp_free_i32(fp1
);
12276 TCGv_i32 fp0
= tcg_temp_new_i32();
12277 TCGv_i32 fp1
= tcg_temp_new_i32();
12279 gen_load_fpr32h(ctx
, fp0
, fs
);
12280 gen_load_fpr32h(ctx
, fp1
, ft
);
12281 gen_store_fpr32(ctx
, fp1
, fd
);
12282 gen_store_fpr32h(ctx
, fp0
, fd
);
12283 tcg_temp_free_i32(fp0
);
12284 tcg_temp_free_i32(fp1
);
12288 case OPC_CMP_UN_PS
:
12289 case OPC_CMP_EQ_PS
:
12290 case OPC_CMP_UEQ_PS
:
12291 case OPC_CMP_OLT_PS
:
12292 case OPC_CMP_ULT_PS
:
12293 case OPC_CMP_OLE_PS
:
12294 case OPC_CMP_ULE_PS
:
12295 case OPC_CMP_SF_PS
:
12296 case OPC_CMP_NGLE_PS
:
12297 case OPC_CMP_SEQ_PS
:
12298 case OPC_CMP_NGL_PS
:
12299 case OPC_CMP_LT_PS
:
12300 case OPC_CMP_NGE_PS
:
12301 case OPC_CMP_LE_PS
:
12302 case OPC_CMP_NGT_PS
:
12303 if (ctx
->opcode
& (1 << 6)) {
12304 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12306 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12310 MIPS_INVAL("farith");
12311 generate_exception_end(ctx
, EXCP_RI
);
12316 /* Coprocessor 3 (FPU) */
12317 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
12318 int fd, int fs, int base, int index)
12320 TCGv t0 = tcg_temp_new();
12323 gen_load_gpr(t0, index);
12324 } else if (index == 0) {
12325 gen_load_gpr(t0, base);
12327 gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
12329 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
12335 TCGv_i32 fp0 = tcg_temp_new_i32();
12337 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
12338 tcg_gen_trunc_tl_i32(fp0, t0);
12339 gen_store_fpr32(ctx, fp0, fd);
12340 tcg_temp_free_i32(fp0);
12345 check_cp1_registers(ctx, fd);
12347 TCGv_i64 fp0 = tcg_temp_new_i64();
12348 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
12349 gen_store_fpr64(ctx, fp0, fd);
12350 tcg_temp_free_i64(fp0);
12354 check_cp1_64bitmode(ctx);
12355 tcg_gen_andi_tl(t0, t0, ~0x7);
12357 TCGv_i64 fp0 = tcg_temp_new_i64();
12359 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
12360 gen_store_fpr64(ctx, fp0, fd);
12361 tcg_temp_free_i64(fp0);
12367 TCGv_i32 fp0 = tcg_temp_new_i32();
12368 gen_load_fpr32(ctx, fp0, fs);
12369 tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
12370 tcg_temp_free_i32(fp0);
12375 check_cp1_registers(ctx, fs);
12377 TCGv_i64 fp0 = tcg_temp_new_i64();
12378 gen_load_fpr64(ctx, fp0, fs);
12379 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
12380 tcg_temp_free_i64(fp0);
12384 check_cp1_64bitmode(ctx);
12385 tcg_gen_andi_tl(t0, t0, ~0x7);
12387 TCGv_i64 fp0 = tcg_temp_new_i64();
12388 gen_load_fpr64(ctx, fp0, fs);
12389 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
12390 tcg_temp_free_i64(fp0);
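/*
 * Illustrative sketch (added, not part of the original translator): the
 * indexed FPU loads/stores above form their effective address from two
 * GPRs.  Register $0 always reads as zero, so gen_flt3_ldst() special-cases
 * base == 0 and index == 0 to avoid a pointless add; the equivalent
 * computation in plain C would be:
 */
#if 0 /* example only, assuming direct access to the register file */
static target_ulong flt3_effective_addr(CPUMIPSState *env, int base, int index)
{
    if (base == 0) {
        return env->active_tc.gpr[index];
    } else if (index == 0) {
        return env->active_tc.gpr[base];
    }
    return env->active_tc.gpr[base] + env->active_tc.gpr[index];
}
#endif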
12397 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12398 int fd
, int fr
, int fs
, int ft
)
12404 TCGv t0
= tcg_temp_local_new();
12405 TCGv_i32 fp
= tcg_temp_new_i32();
12406 TCGv_i32 fph
= tcg_temp_new_i32();
12407 TCGLabel
*l1
= gen_new_label();
12408 TCGLabel
*l2
= gen_new_label();
12410 gen_load_gpr(t0
, fr
);
12411 tcg_gen_andi_tl(t0
, t0
, 0x7);
12413 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12414 gen_load_fpr32(ctx
, fp
, fs
);
12415 gen_load_fpr32h(ctx
, fph
, fs
);
12416 gen_store_fpr32(ctx
, fp
, fd
);
12417 gen_store_fpr32h(ctx
, fph
, fd
);
12420 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12422 #ifdef TARGET_WORDS_BIGENDIAN
12423 gen_load_fpr32(ctx
, fp
, fs
);
12424 gen_load_fpr32h(ctx
, fph
, ft
);
12425 gen_store_fpr32h(ctx
, fp
, fd
);
12426 gen_store_fpr32(ctx
, fph
, fd
);
12428 gen_load_fpr32h(ctx
, fph
, fs
);
12429 gen_load_fpr32(ctx
, fp
, ft
);
12430 gen_store_fpr32(ctx
, fph
, fd
);
12431 gen_store_fpr32h(ctx
, fp
, fd
);
12434 tcg_temp_free_i32(fp
);
12435 tcg_temp_free_i32(fph
);
12441 TCGv_i32 fp0
= tcg_temp_new_i32();
12442 TCGv_i32 fp1
= tcg_temp_new_i32();
12443 TCGv_i32 fp2
= tcg_temp_new_i32();
12445 gen_load_fpr32(ctx
, fp0
, fs
);
12446 gen_load_fpr32(ctx
, fp1
, ft
);
12447 gen_load_fpr32(ctx
, fp2
, fr
);
12448 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12449 tcg_temp_free_i32(fp0
);
12450 tcg_temp_free_i32(fp1
);
12451 gen_store_fpr32(ctx
, fp2
, fd
);
12452 tcg_temp_free_i32(fp2
);
12457 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12459 TCGv_i64 fp0
= tcg_temp_new_i64();
12460 TCGv_i64 fp1
= tcg_temp_new_i64();
12461 TCGv_i64 fp2
= tcg_temp_new_i64();
12463 gen_load_fpr64(ctx
, fp0
, fs
);
12464 gen_load_fpr64(ctx
, fp1
, ft
);
12465 gen_load_fpr64(ctx
, fp2
, fr
);
12466 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12467 tcg_temp_free_i64(fp0
);
12468 tcg_temp_free_i64(fp1
);
12469 gen_store_fpr64(ctx
, fp2
, fd
);
12470 tcg_temp_free_i64(fp2
);
12476 TCGv_i64 fp0
= tcg_temp_new_i64();
12477 TCGv_i64 fp1
= tcg_temp_new_i64();
12478 TCGv_i64 fp2
= tcg_temp_new_i64();
12480 gen_load_fpr64(ctx
, fp0
, fs
);
12481 gen_load_fpr64(ctx
, fp1
, ft
);
12482 gen_load_fpr64(ctx
, fp2
, fr
);
12483 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12484 tcg_temp_free_i64(fp0
);
12485 tcg_temp_free_i64(fp1
);
12486 gen_store_fpr64(ctx
, fp2
, fd
);
12487 tcg_temp_free_i64(fp2
);
12493 TCGv_i32 fp0
= tcg_temp_new_i32();
12494 TCGv_i32 fp1
= tcg_temp_new_i32();
12495 TCGv_i32 fp2
= tcg_temp_new_i32();
12497 gen_load_fpr32(ctx
, fp0
, fs
);
12498 gen_load_fpr32(ctx
, fp1
, ft
);
12499 gen_load_fpr32(ctx
, fp2
, fr
);
12500 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12501 tcg_temp_free_i32(fp0
);
12502 tcg_temp_free_i32(fp1
);
12503 gen_store_fpr32(ctx
, fp2
, fd
);
12504 tcg_temp_free_i32(fp2
);
12509 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12511 TCGv_i64 fp0
= tcg_temp_new_i64();
12512 TCGv_i64 fp1
= tcg_temp_new_i64();
12513 TCGv_i64 fp2
= tcg_temp_new_i64();
12515 gen_load_fpr64(ctx
, fp0
, fs
);
12516 gen_load_fpr64(ctx
, fp1
, ft
);
12517 gen_load_fpr64(ctx
, fp2
, fr
);
12518 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12519 tcg_temp_free_i64(fp0
);
12520 tcg_temp_free_i64(fp1
);
12521 gen_store_fpr64(ctx
, fp2
, fd
);
12522 tcg_temp_free_i64(fp2
);
12528 TCGv_i64 fp0
= tcg_temp_new_i64();
12529 TCGv_i64 fp1
= tcg_temp_new_i64();
12530 TCGv_i64 fp2
= tcg_temp_new_i64();
12532 gen_load_fpr64(ctx
, fp0
, fs
);
12533 gen_load_fpr64(ctx
, fp1
, ft
);
12534 gen_load_fpr64(ctx
, fp2
, fr
);
12535 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12536 tcg_temp_free_i64(fp0
);
12537 tcg_temp_free_i64(fp1
);
12538 gen_store_fpr64(ctx
, fp2
, fd
);
12539 tcg_temp_free_i64(fp2
);
12545 TCGv_i32 fp0
= tcg_temp_new_i32();
12546 TCGv_i32 fp1
= tcg_temp_new_i32();
12547 TCGv_i32 fp2
= tcg_temp_new_i32();
12549 gen_load_fpr32(ctx
, fp0
, fs
);
12550 gen_load_fpr32(ctx
, fp1
, ft
);
12551 gen_load_fpr32(ctx
, fp2
, fr
);
12552 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12553 tcg_temp_free_i32(fp0
);
12554 tcg_temp_free_i32(fp1
);
12555 gen_store_fpr32(ctx
, fp2
, fd
);
12556 tcg_temp_free_i32(fp2
);
12561 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12563 TCGv_i64 fp0
= tcg_temp_new_i64();
12564 TCGv_i64 fp1
= tcg_temp_new_i64();
12565 TCGv_i64 fp2
= tcg_temp_new_i64();
12567 gen_load_fpr64(ctx
, fp0
, fs
);
12568 gen_load_fpr64(ctx
, fp1
, ft
);
12569 gen_load_fpr64(ctx
, fp2
, fr
);
12570 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12571 tcg_temp_free_i64(fp0
);
12572 tcg_temp_free_i64(fp1
);
12573 gen_store_fpr64(ctx
, fp2
, fd
);
12574 tcg_temp_free_i64(fp2
);
12580 TCGv_i64 fp0
= tcg_temp_new_i64();
12581 TCGv_i64 fp1
= tcg_temp_new_i64();
12582 TCGv_i64 fp2
= tcg_temp_new_i64();
12584 gen_load_fpr64(ctx
, fp0
, fs
);
12585 gen_load_fpr64(ctx
, fp1
, ft
);
12586 gen_load_fpr64(ctx
, fp2
, fr
);
12587 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12588 tcg_temp_free_i64(fp0
);
12589 tcg_temp_free_i64(fp1
);
12590 gen_store_fpr64(ctx
, fp2
, fd
);
12591 tcg_temp_free_i64(fp2
);
12597 TCGv_i32 fp0
= tcg_temp_new_i32();
12598 TCGv_i32 fp1
= tcg_temp_new_i32();
12599 TCGv_i32 fp2
= tcg_temp_new_i32();
12601 gen_load_fpr32(ctx
, fp0
, fs
);
12602 gen_load_fpr32(ctx
, fp1
, ft
);
12603 gen_load_fpr32(ctx
, fp2
, fr
);
12604 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12605 tcg_temp_free_i32(fp0
);
12606 tcg_temp_free_i32(fp1
);
12607 gen_store_fpr32(ctx
, fp2
, fd
);
12608 tcg_temp_free_i32(fp2
);
12613 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12615 TCGv_i64 fp0
= tcg_temp_new_i64();
12616 TCGv_i64 fp1
= tcg_temp_new_i64();
12617 TCGv_i64 fp2
= tcg_temp_new_i64();
12619 gen_load_fpr64(ctx
, fp0
, fs
);
12620 gen_load_fpr64(ctx
, fp1
, ft
);
12621 gen_load_fpr64(ctx
, fp2
, fr
);
12622 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12623 tcg_temp_free_i64(fp0
);
12624 tcg_temp_free_i64(fp1
);
12625 gen_store_fpr64(ctx
, fp2
, fd
);
12626 tcg_temp_free_i64(fp2
);
12632 TCGv_i64 fp0
= tcg_temp_new_i64();
12633 TCGv_i64 fp1
= tcg_temp_new_i64();
12634 TCGv_i64 fp2
= tcg_temp_new_i64();
12636 gen_load_fpr64(ctx
, fp0
, fs
);
12637 gen_load_fpr64(ctx
, fp1
, ft
);
12638 gen_load_fpr64(ctx
, fp2
, fr
);
12639 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12640 tcg_temp_free_i64(fp0
);
12641 tcg_temp_free_i64(fp1
);
12642 gen_store_fpr64(ctx
, fp2
, fd
);
12643 tcg_temp_free_i64(fp2
);
12647 MIPS_INVAL("flt3_arith");
12648 generate_exception_end(ctx
, EXCP_RI
);
12653 static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
12657 #if !defined(CONFIG_USER_ONLY)
12658 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12659 Therefore only check the ISA in system mode. */
12660 check_insn(ctx, ISA_MIPS32R2);
12662 t0 = tcg_temp_new();
12666 gen_helper_rdhwr_cpunum(t0, cpu_env);
12667 gen_store_gpr(t0, rt);
12670 gen_helper_rdhwr_synci_step(t0, cpu_env);
12671 gen_store_gpr(t0, rt);
12674 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
12677 gen_helper_rdhwr_cc(t0, cpu_env);
12678 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
12681 gen_store_gpr(t0, rt);
12682 /* Break the TB to be able to take timer interrupts immediately
12683 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12684 we break completely out of translated code. */
12685 gen_save_pc(ctx->base.pc_next + 4);
12686 ctx->base.is_jmp = DISAS_EXIT;
12689 gen_helper_rdhwr_ccres(t0, cpu_env);
12690 gen_store_gpr(t0, rt);
12693 check_insn(ctx, ISA_MIPS32R6);
12695 /* Performance counter registers are not implemented other than
12696 * control register 0. */
12698 generate_exception(ctx, EXCP_RI);
12700 gen_helper_rdhwr_performance(t0, cpu_env);
12701 gen_store_gpr(t0, rt);
12704 check_insn(ctx, ISA_MIPS32R6);
12705 gen_helper_rdhwr_xnp(t0, cpu_env);
12706 gen_store_gpr(t0, rt);
12709 #if defined(CONFIG_USER_ONLY)
12710 tcg_gen_ld_tl(t0, cpu_env,
12711 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
12712 gen_store_gpr(t0, rt);
12715 if ((ctx->hflags & MIPS_HFLAG_CP0) ||
12716 (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
12717 tcg_gen_ld_tl(t0, cpu_env,
12718 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
12719 gen_store_gpr(t0, rt);
12721 generate_exception_end(ctx, EXCP_RI);
12725 default: /* Invalid */
12726 MIPS_INVAL("rdhwr");
12727 generate_exception_end(ctx, EXCP_RI);
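/*
 * Illustrative sketch (added commentary, not in the original source): in
 * system mode the UserLocal case above is only allowed when CP0 is
 * accessible or the guest kernel has enabled the register via HWREna,
 * which is exactly this predicate:
 */
#if 0 /* example only */
static bool rdhwr_userlocal_allowed(const DisasContext *ctx)
{
    return (ctx->hflags & MIPS_HFLAG_CP0) ||
           (ctx->hflags & MIPS_HFLAG_HWRENA_ULR);
}
#endif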
12733 static inline void clear_branch_hflags(DisasContext *ctx)
12735 ctx->hflags &= ~MIPS_HFLAG_BMASK;
12736 if (ctx->base.is_jmp == DISAS_NEXT) {
12737 save_cpu_state(ctx, 0);
12739 /* it is not safe to save ctx->hflags as hflags may be changed
12740 in execution time by the instruction in delay / forbidden slot. */
12741 tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
12745 static void gen_branch(DisasContext *ctx, int insn_bytes)
12747 if (ctx->hflags & MIPS_HFLAG_BMASK) {
12748 int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
12749 /* Branches completion */
12750 clear_branch_hflags(ctx);
12751 ctx->base.is_jmp = DISAS_NORETURN;
12752 /* FIXME: Need to clear can_do_io. */
12753 switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
12754 case MIPS_HFLAG_FBNSLOT:
12755 gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
12758 /* unconditional branch */
12759 if (proc_hflags & MIPS_HFLAG_BX) {
12760 tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
12762 gen_goto_tb(ctx, 0, ctx->btarget);
12764 case MIPS_HFLAG_BL:
12765 /* blikely taken case */
12766 gen_goto_tb(ctx, 0, ctx->btarget);
12768 case MIPS_HFLAG_BC:
12769 /* Conditional branch */
12771 TCGLabel *l1 = gen_new_label();
12773 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
12774 gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
12776 gen_goto_tb(ctx, 0, ctx->btarget);
12779 case MIPS_HFLAG_BR:
12780 /* unconditional branch to register */
12781 if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
12782 TCGv t0 = tcg_temp_new();
12783 TCGv_i32 t1 = tcg_temp_new_i32();
12785 tcg_gen_andi_tl(t0, btarget, 0x1);
12786 tcg_gen_trunc_tl_i32(t1, t0);
12788 tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
12789 tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
12790 tcg_gen_or_i32(hflags, hflags, t1);
12791 tcg_temp_free_i32(t1);
12793 tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
12795 tcg_gen_mov_tl(cpu_PC, btarget);
12797 if (ctx->base.singlestep_enabled) {
12798 save_cpu_state(ctx, 0);
12799 gen_helper_raise_exception_debug(cpu_env);
12801 tcg_gen_lookup_and_goto_ptr();
12804 fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
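/*
 * Illustrative sketch (added, not part of the original translator): in the
 * MIPS_HFLAG_BR case above, when MIPS16 or microMIPS is available, bit 0 of
 * the jump-register target selects the ISA mode and is stripped from the
 * new PC.  A plain-C analogue of that split:
 */
#if 0 /* example only */
static void split_jr_target(target_ulong btarget, target_ulong *new_pc, int *isa16_mode)
{
    *isa16_mode = btarget & 0x1;              /* 1 = MIPS16/microMIPS code */
    *new_pc = btarget & ~(target_ulong)0x1;   /* instruction address with the mode bit cleared */
}
#endif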
12810 /* Compact Branches */
12811 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12812 int rs
, int rt
, int32_t offset
)
12814 int bcond_compute
= 0;
12815 TCGv t0
= tcg_temp_new();
12816 TCGv t1
= tcg_temp_new();
12817 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12819 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12820 #ifdef MIPS_DEBUG_DISAS
12821 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12822 "\n", ctx
->base
.pc_next
);
12824 generate_exception_end(ctx
, EXCP_RI
);
12828 /* Load needed operands and calculate btarget */
12830 /* compact branch */
12831 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12832 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12833 gen_load_gpr(t0
, rs
);
12834 gen_load_gpr(t1
, rt
);
12836 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12837 if (rs
<= rt
&& rs
== 0) {
12838 /* OPC_BEQZALC, OPC_BNEZALC */
12839 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12842 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12843 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12844 gen_load_gpr(t0
, rs
);
12845 gen_load_gpr(t1
, rt
);
12847 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12849 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12850 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12851 if (rs
== 0 || rs
== rt
) {
12852 /* OPC_BLEZALC, OPC_BGEZALC */
12853 /* OPC_BGTZALC, OPC_BLTZALC */
12854 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12856 gen_load_gpr(t0
, rs
);
12857 gen_load_gpr(t1
, rt
);
12859 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12863 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12868 /* OPC_BEQZC, OPC_BNEZC */
12869 gen_load_gpr(t0
, rs
);
12871 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12873 /* OPC_JIC, OPC_JIALC */
12874 TCGv tbase
= tcg_temp_new();
12875 TCGv toffset
= tcg_temp_new();
12877 gen_load_gpr(tbase
, rt
);
12878 tcg_gen_movi_tl(toffset
, offset
);
12879 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12880 tcg_temp_free(tbase
);
12881 tcg_temp_free(toffset
);
12885 MIPS_INVAL("Compact branch/jump");
12886 generate_exception_end(ctx
, EXCP_RI
);
12890 if (bcond_compute
== 0) {
12891 /* Uncoditional compact branch */
12894 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12897 ctx
->hflags
|= MIPS_HFLAG_BR
;
12900 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12903 ctx
->hflags
|= MIPS_HFLAG_B
;
12906 MIPS_INVAL("Compact branch/jump");
12907 generate_exception_end(ctx
, EXCP_RI
);
12911 /* Generating branch here as compact branches don't have delay slot */
12912 gen_branch(ctx
, 4);
12914 /* Conditional compact branch */
12915 TCGLabel
*fs
= gen_new_label();
12916 save_cpu_state(ctx
, 0);
12919 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12920 if (rs
== 0 && rt
!= 0) {
12922 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12923 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12925 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12928 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12931 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12932 if (rs
== 0 && rt
!= 0) {
12934 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12935 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12937 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12940 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12943 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12944 if (rs
== 0 && rt
!= 0) {
12946 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12947 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12949 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12952 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12955 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12956 if (rs
== 0 && rt
!= 0) {
12958 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12959 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12961 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12964 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12967 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12968 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12970 /* OPC_BOVC, OPC_BNVC */
12971 TCGv t2
= tcg_temp_new();
12972 TCGv t3
= tcg_temp_new();
12973 TCGv t4
= tcg_temp_new();
12974 TCGv input_overflow
= tcg_temp_new();
12976 gen_load_gpr(t0
, rs
);
12977 gen_load_gpr(t1
, rt
);
12978 tcg_gen_ext32s_tl(t2
, t0
);
12979 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12980 tcg_gen_ext32s_tl(t3
, t1
);
12981 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12982 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12984 tcg_gen_add_tl(t4
, t2
, t3
);
12985 tcg_gen_ext32s_tl(t4
, t4
);
12986 tcg_gen_xor_tl(t2
, t2
, t3
);
12987 tcg_gen_xor_tl(t3
, t4
, t3
);
12988 tcg_gen_andc_tl(t2
, t3
, t2
);
12989 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12990 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12991 if (opc
== OPC_BOVC
) {
12993 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12996 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12998 tcg_temp_free(input_overflow
);
13002 } else if (rs
< rt
&& rs
== 0) {
13003 /* OPC_BEQZALC, OPC_BNEZALC */
13004 if (opc
== OPC_BEQZALC
) {
13006 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13009 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13012 /* OPC_BEQC, OPC_BNEC */
13013 if (opc
== OPC_BEQC
) {
13015 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13018 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13023 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13026 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13029 MIPS_INVAL("Compact conditional branch/jump");
13030 generate_exception_end(ctx
, EXCP_RI
);
13034 /* Generating branch here as compact branches don't have delay slot */
13035 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13038 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
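/*
 * Illustrative sketch (added commentary): the BOVC/BNVC path above branches
 * on 32-bit signed-add overflow of rs + rt, also treating operands that are
 * not valid sign-extended 32-bit values as overflowing; BOVC is taken when
 * the predicate below is true, BNVC when it is false.
 */
#if 0 /* example only */
static bool bovc_overflows(int64_t rs_val, int64_t rt_val)
{
    if (rs_val != (int32_t)rs_val || rt_val != (int32_t)rt_val) {
        return true;                        /* operand not properly sign-extended */
    }
    int64_t sum = (int64_t)(int32_t)rs_val + (int32_t)rt_val;
    return sum != (int32_t)sum;             /* the 32-bit addition overflowed */
}
#endif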
13046 /* ISA extensions (ASEs) */
13047 /* MIPS16 extension to MIPS32 */
13049 /* MIPS16 major opcodes */
13051 M16_OPC_ADDIUSP
= 0x00,
13052 M16_OPC_ADDIUPC
= 0x01,
13054 M16_OPC_JAL
= 0x03,
13055 M16_OPC_BEQZ
= 0x04,
13056 M16_OPC_BNEQZ
= 0x05,
13057 M16_OPC_SHIFT
= 0x06,
13059 M16_OPC_RRIA
= 0x08,
13060 M16_OPC_ADDIU8
= 0x09,
13061 M16_OPC_SLTI
= 0x0a,
13062 M16_OPC_SLTIU
= 0x0b,
13065 M16_OPC_CMPI
= 0x0e,
13069 M16_OPC_LWSP
= 0x12,
13071 M16_OPC_LBU
= 0x14,
13072 M16_OPC_LHU
= 0x15,
13073 M16_OPC_LWPC
= 0x16,
13074 M16_OPC_LWU
= 0x17,
13077 M16_OPC_SWSP
= 0x1a,
13079 M16_OPC_RRR
= 0x1c,
13081 M16_OPC_EXTEND
= 0x1e,
13085 /* I8 funct field */
13104 /* RR funct field */
13138 /* I64 funct field */
13146 I64_DADDIUPC
= 0x6,
13150 /* RR ry field for CNVT */
13152 RR_RY_CNVT_ZEB
= 0x0,
13153 RR_RY_CNVT_ZEH
= 0x1,
13154 RR_RY_CNVT_ZEW
= 0x2,
13155 RR_RY_CNVT_SEB
= 0x4,
13156 RR_RY_CNVT_SEH
= 0x5,
13157 RR_RY_CNVT_SEW
= 0x6,
13160 static int xlat (int r)
13162 static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
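/*
 * Added note (not in the original source): MIPS16 encodes most registers in
 * a 3-bit field; xlat() expands field values 0..7 to $16, $17, $2..$7
 * (s0, s1, v0, v1, a0..a3).  The microMIPS mmreg()/mmreg2() helpers later
 * in this file perform the analogous mapping.
 */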
13167 static void gen_mips16_save (DisasContext
*ctx
,
13168 int xsregs
, int aregs
,
13169 int do_ra
, int do_s0
, int do_s1
,
13172 TCGv t0
= tcg_temp_new();
13173 TCGv t1
= tcg_temp_new();
13174 TCGv t2
= tcg_temp_new();
13204 generate_exception_end(ctx
, EXCP_RI
);
13210 gen_base_offset_addr(ctx
, t0
, 29, 12);
13211 gen_load_gpr(t1
, 7);
13212 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13215 gen_base_offset_addr(ctx
, t0
, 29, 8);
13216 gen_load_gpr(t1
, 6);
13217 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13220 gen_base_offset_addr(ctx
, t0
, 29, 4);
13221 gen_load_gpr(t1
, 5);
13222 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13225 gen_base_offset_addr(ctx
, t0
, 29, 0);
13226 gen_load_gpr(t1
, 4);
13227 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13230 gen_load_gpr(t0
, 29);
13232 #define DECR_AND_STORE(reg) do { \
13233 tcg_gen_movi_tl(t2, -4); \
13234 gen_op_addr_add(ctx, t0, t0, t2); \
13235 gen_load_gpr(t1, reg); \
13236 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13240 DECR_AND_STORE(31);
13245 DECR_AND_STORE(30);
13248 DECR_AND_STORE(23);
13251 DECR_AND_STORE(22);
13254 DECR_AND_STORE(21);
13257 DECR_AND_STORE(20);
13260 DECR_AND_STORE(19);
13263 DECR_AND_STORE(18);
13267 DECR_AND_STORE(17);
13270 DECR_AND_STORE(16);
13300 generate_exception_end(ctx
, EXCP_RI
);
13316 #undef DECR_AND_STORE
13318 tcg_gen_movi_tl(t2
, -framesize
);
13319 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13325 static void gen_mips16_restore (DisasContext
*ctx
,
13326 int xsregs
, int aregs
,
13327 int do_ra
, int do_s0
, int do_s1
,
13331 TCGv t0
= tcg_temp_new();
13332 TCGv t1
= tcg_temp_new();
13333 TCGv t2
= tcg_temp_new();
13335 tcg_gen_movi_tl(t2
, framesize
);
13336 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13338 #define DECR_AND_LOAD(reg) do { \
13339 tcg_gen_movi_tl(t2, -4); \
13340 gen_op_addr_add(ctx, t0, t0, t2); \
13341 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13342 gen_store_gpr(t1, reg); \
13406 generate_exception_end(ctx
, EXCP_RI
);
13422 #undef DECR_AND_LOAD
13424 tcg_gen_movi_tl(t2
, framesize
);
13425 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13431 static void gen_addiupc (DisasContext *ctx, int rx, int imm,
13432 int is_64_bit, int extended)
13436 if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
13437 generate_exception_end(ctx, EXCP_RI);
13441 t0 = tcg_temp_new();
13443 tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
13444 tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
13446 tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
13452 static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
13455 TCGv_i32 t0 = tcg_const_i32(op);
13456 TCGv t1 = tcg_temp_new();
13457 gen_base_offset_addr(ctx, t1, base, offset);
13458 gen_helper_cache(cpu_env, t1, t0);
13461 #if defined(TARGET_MIPS64)
13462 static void decode_i64_mips16 (DisasContext
*ctx
,
13463 int ry
, int funct
, int16_t offset
,
13468 check_insn(ctx
, ISA_MIPS3
);
13469 check_mips_64(ctx
);
13470 offset
= extended
? offset
: offset
<< 3;
13471 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13474 check_insn(ctx
, ISA_MIPS3
);
13475 check_mips_64(ctx
);
13476 offset
= extended
? offset
: offset
<< 3;
13477 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13480 check_insn(ctx
, ISA_MIPS3
);
13481 check_mips_64(ctx
);
13482 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13483 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13486 check_insn(ctx
, ISA_MIPS3
);
13487 check_mips_64(ctx
);
13488 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13489 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13492 check_insn(ctx
, ISA_MIPS3
);
13493 check_mips_64(ctx
);
13494 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13495 generate_exception_end(ctx
, EXCP_RI
);
13497 offset
= extended
? offset
: offset
<< 3;
13498 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13502 check_insn(ctx
, ISA_MIPS3
);
13503 check_mips_64(ctx
);
13504 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13505 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13508 check_insn(ctx
, ISA_MIPS3
);
13509 check_mips_64(ctx
);
13510 offset
= extended
? offset
: offset
<< 2;
13511 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13514 check_insn(ctx
, ISA_MIPS3
);
13515 check_mips_64(ctx
);
13516 offset
= extended
? offset
: offset
<< 2;
13517 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13523 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13525 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13526 int op
, rx
, ry
, funct
, sa
;
13527 int16_t imm
, offset
;
13529 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13530 op
= (ctx
->opcode
>> 11) & 0x1f;
13531 sa
= (ctx
->opcode
>> 22) & 0x1f;
13532 funct
= (ctx
->opcode
>> 8) & 0x7;
13533 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13534 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13535 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13536 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13537 | (ctx
->opcode
& 0x1f));
13539 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13542 case M16_OPC_ADDIUSP
:
13543 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13545 case M16_OPC_ADDIUPC
:
13546 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13549 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13550 /* No delay slot, so just process as a normal instruction */
13553 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13554 /* No delay slot, so just process as a normal instruction */
13556 case M16_OPC_BNEQZ
:
13557 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13558 /* No delay slot, so just process as a normal instruction */
13560 case M16_OPC_SHIFT
:
13561 switch (ctx
->opcode
& 0x3) {
13563 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13566 #if defined(TARGET_MIPS64)
13567 check_mips_64(ctx
);
13568 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13570 generate_exception_end(ctx
, EXCP_RI
);
13574 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13577 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13581 #if defined(TARGET_MIPS64)
13583 check_insn(ctx
, ISA_MIPS3
);
13584 check_mips_64(ctx
);
13585 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13589 imm
= ctx
->opcode
& 0xf;
13590 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13591 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13592 imm
= (int16_t) (imm
<< 1) >> 1;
13593 if ((ctx
->opcode
>> 4) & 0x1) {
13594 #if defined(TARGET_MIPS64)
13595 check_mips_64(ctx
);
13596 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13598 generate_exception_end(ctx
, EXCP_RI
);
13601 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13604 case M16_OPC_ADDIU8
:
13605 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13608 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13610 case M16_OPC_SLTIU
:
13611 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13616 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13619 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13622 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13625 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13628 check_insn(ctx
, ISA_MIPS32
);
13630 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13631 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13632 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13633 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13634 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13635 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13636 | (ctx
->opcode
& 0xf)) << 3;
13638 if (ctx
->opcode
& (1 << 7)) {
13639 gen_mips16_save(ctx
, xsregs
, aregs
,
13640 do_ra
, do_s0
, do_s1
,
13643 gen_mips16_restore(ctx
, xsregs
, aregs
,
13644 do_ra
, do_s0
, do_s1
,
13650 generate_exception_end(ctx
, EXCP_RI
);
13655 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13658 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13660 #if defined(TARGET_MIPS64)
13662 check_insn(ctx
, ISA_MIPS3
);
13663 check_mips_64(ctx
);
13664 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13668 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13671 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13674 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13677 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13680 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13683 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13686 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13688 #if defined(TARGET_MIPS64)
13690 check_insn(ctx
, ISA_MIPS3
);
13691 check_mips_64(ctx
);
13692 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13696 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13699 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13702 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13705 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13707 #if defined(TARGET_MIPS64)
13709 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13713 generate_exception_end(ctx
, EXCP_RI
);
13720 static inline bool is_uhi(int sdbbp_code)
13722 #ifdef CONFIG_USER_ONLY
13725 return semihosting_enabled() && sdbbp_code == 1;
13729 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13733 int op
, cnvt_op
, op1
, offset
;
13737 op
= (ctx
->opcode
>> 11) & 0x1f;
13738 sa
= (ctx
->opcode
>> 2) & 0x7;
13739 sa
= sa
== 0 ? 8 : sa
;
13740 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13741 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13742 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13743 op1
= offset
= ctx
->opcode
& 0x1f;
13748 case M16_OPC_ADDIUSP
:
13750 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13752 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13755 case M16_OPC_ADDIUPC
:
13756 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13759 offset
= (ctx
->opcode
& 0x7ff) << 1;
13760 offset
= (int16_t)(offset
<< 4) >> 4;
13761 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13762 /* No delay slot, so just process as a normal instruction */
13765 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13766 offset
= (((ctx
->opcode
& 0x1f) << 21)
13767 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13769 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13770 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13774 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13775 ((int8_t)ctx
->opcode
) << 1, 0);
13776 /* No delay slot, so just process as a normal instruction */
13778 case M16_OPC_BNEQZ
:
13779 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13780 ((int8_t)ctx
->opcode
) << 1, 0);
13781 /* No delay slot, so just process as a normal instruction */
13783 case M16_OPC_SHIFT
:
13784 switch (ctx
->opcode
& 0x3) {
13786 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13789 #if defined(TARGET_MIPS64)
13790 check_insn(ctx
, ISA_MIPS3
);
13791 check_mips_64(ctx
);
13792 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13794 generate_exception_end(ctx
, EXCP_RI
);
13798 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13801 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13805 #if defined(TARGET_MIPS64)
13807 check_insn(ctx
, ISA_MIPS3
);
13808 check_mips_64(ctx
);
13809 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13814 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13816 if ((ctx
->opcode
>> 4) & 1) {
13817 #if defined(TARGET_MIPS64)
13818 check_insn(ctx
, ISA_MIPS3
);
13819 check_mips_64(ctx
);
13820 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13822 generate_exception_end(ctx
, EXCP_RI
);
13825 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13829 case M16_OPC_ADDIU8
:
13831 int16_t imm
= (int8_t) ctx
->opcode
;
13833 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13838 int16_t imm
= (uint8_t) ctx
->opcode
;
13839 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13842 case M16_OPC_SLTIU
:
13844 int16_t imm
= (uint8_t) ctx
->opcode
;
13845 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13852 funct
= (ctx
->opcode
>> 8) & 0x7;
13855 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13856 ((int8_t)ctx
->opcode
) << 1, 0);
13859 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13860 ((int8_t)ctx
->opcode
) << 1, 0);
13863 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13866 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13867 ((int8_t)ctx
->opcode
) << 3);
13870 check_insn(ctx
, ISA_MIPS32
);
13872 int do_ra
= ctx
->opcode
& (1 << 6);
13873 int do_s0
= ctx
->opcode
& (1 << 5);
13874 int do_s1
= ctx
->opcode
& (1 << 4);
13875 int framesize
= ctx
->opcode
& 0xf;
13877 if (framesize
== 0) {
13880 framesize
= framesize
<< 3;
13883 if (ctx
->opcode
& (1 << 7)) {
13884 gen_mips16_save(ctx
, 0, 0,
13885 do_ra
, do_s0
, do_s1
, framesize
);
13887 gen_mips16_restore(ctx
, 0, 0,
13888 do_ra
, do_s0
, do_s1
, framesize
);
13894 int rz
= xlat(ctx
->opcode
& 0x7);
13896 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13897 ((ctx
->opcode
>> 5) & 0x7);
13898 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13902 reg32
= ctx
->opcode
& 0x1f;
13903 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13906 generate_exception_end(ctx
, EXCP_RI
);
13913 int16_t imm
= (uint8_t) ctx
->opcode
;
13915 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13920 int16_t imm
= (uint8_t) ctx
->opcode
;
13921 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13924 #if defined(TARGET_MIPS64)
13926 check_insn(ctx
, ISA_MIPS3
);
13927 check_mips_64(ctx
);
13928 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13932 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13935 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13938 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13941 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13944 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13947 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13950 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13952 #if defined (TARGET_MIPS64)
13954 check_insn(ctx
, ISA_MIPS3
);
13955 check_mips_64(ctx
);
13956 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13960 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13963 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13966 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13969 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13973 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13976 switch (ctx
->opcode
& 0x3) {
13978 mips32_op
= OPC_ADDU
;
13981 mips32_op
= OPC_SUBU
;
13983 #if defined(TARGET_MIPS64)
13985 mips32_op
= OPC_DADDU
;
13986 check_insn(ctx
, ISA_MIPS3
);
13987 check_mips_64(ctx
);
13990 mips32_op
= OPC_DSUBU
;
13991 check_insn(ctx
, ISA_MIPS3
);
13992 check_mips_64(ctx
);
13996 generate_exception_end(ctx
, EXCP_RI
);
14000 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14009 int nd
= (ctx
->opcode
>> 7) & 0x1;
14010 int link
= (ctx
->opcode
>> 6) & 0x1;
14011 int ra
= (ctx
->opcode
>> 5) & 0x1;
14014 check_insn(ctx
, ISA_MIPS32
);
14023 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14028 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14029 gen_helper_do_semihosting(cpu_env
);
14031 /* XXX: not clear which exception should be raised
14032 * when in debug mode...
14034 check_insn(ctx
, ISA_MIPS32
);
14035 generate_exception_end(ctx
, EXCP_DBp
);
14039 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14042 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14045 generate_exception_end(ctx
, EXCP_BREAK
);
14048 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14051 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14054 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14056 #if defined (TARGET_MIPS64)
14058 check_insn(ctx
, ISA_MIPS3
);
14059 check_mips_64(ctx
);
14060 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14064 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14067 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14070 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14073 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14076 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14079 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14082 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14085 check_insn(ctx
, ISA_MIPS32
);
14087 case RR_RY_CNVT_ZEB
:
14088 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14090 case RR_RY_CNVT_ZEH
:
14091 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14093 case RR_RY_CNVT_SEB
:
14094 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14096 case RR_RY_CNVT_SEH
:
14097 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14099 #if defined (TARGET_MIPS64)
14100 case RR_RY_CNVT_ZEW
:
14101 check_insn(ctx
, ISA_MIPS64
);
14102 check_mips_64(ctx
);
14103 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14105 case RR_RY_CNVT_SEW
:
14106 check_insn(ctx
, ISA_MIPS64
);
14107 check_mips_64(ctx
);
14108 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14112 generate_exception_end(ctx
, EXCP_RI
);
14117 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14119 #if defined (TARGET_MIPS64)
14121 check_insn(ctx
, ISA_MIPS3
);
14122 check_mips_64(ctx
);
14123 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14126 check_insn(ctx
, ISA_MIPS3
);
14127 check_mips_64(ctx
);
14128 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14131 check_insn(ctx
, ISA_MIPS3
);
14132 check_mips_64(ctx
);
14133 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14136 check_insn(ctx
, ISA_MIPS3
);
14137 check_mips_64(ctx
);
14138 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14142 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14145 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14148 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14151 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14153 #if defined (TARGET_MIPS64)
14155 check_insn(ctx
, ISA_MIPS3
);
14156 check_mips_64(ctx
);
14157 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14160 check_insn(ctx
, ISA_MIPS3
);
14161 check_mips_64(ctx
);
14162 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14165 check_insn(ctx
, ISA_MIPS3
);
14166 check_mips_64(ctx
);
14167 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14170 check_insn(ctx
, ISA_MIPS3
);
14171 check_mips_64(ctx
);
14172 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14176 generate_exception_end(ctx
, EXCP_RI
);
14180 case M16_OPC_EXTEND
:
14181 decode_extended_mips16_opc(env
, ctx
);
14184 #if defined(TARGET_MIPS64)
14186 funct
= (ctx
->opcode
>> 8) & 0x7;
14187 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14191 generate_exception_end(ctx
, EXCP_RI
);
14198 /* microMIPS extension to MIPS32/MIPS64 */
14201 * microMIPS32/microMIPS64 major opcodes
14203 * 1. MIPS Architecture for Programmers Volume II-B:
14204 * The microMIPS32 Instruction Set (Revision 3.05)
14206 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14208 * 2. MIPS Architecture For Programmers Volume II-A:
14209 * The MIPS64 Instruction Set (Revision 3.51)
14239 POOL32S
= 0x16, /* MIPS64 */
14240 DADDIU32
= 0x17, /* MIPS64 */
14269 /* 0x29 is reserved */
14282 /* 0x31 is reserved */
14295 SD32
= 0x36, /* MIPS64 */
14296 LD32
= 0x37, /* MIPS64 */
14298 /* 0x39 is reserved */
14314 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14336 /* POOL32A encoding of minor opcode field */
14339 /* These opcodes are distinguished only by bits 9..6; those bits are
14340 * what are recorded below. */
14377 /* The following can be distinguished by their lower 6 bits. */
14387 /* POOL32AXF encoding of minor opcode field extension */
14390 * 1. MIPS Architecture for Programmers Volume II-B:
14391 * The microMIPS32 Instruction Set (Revision 3.05)
14393 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14395 * 2. MIPS Architecture for Programmers VolumeIV-e:
14396 * The MIPS DSP Application-Specific Extension
14397 * to the microMIPS32 Architecture (Revision 2.34)
14399 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14414 /* begin of microMIPS32 DSP */
14416 /* bits 13..12 for 0x01 */
14422 /* bits 13..12 for 0x2a */
14428 /* bits 13..12 for 0x32 */
14432 /* end of microMIPS32 DSP */
14434 /* bits 15..12 for 0x2c */
14451 /* bits 15..12 for 0x34 */
14459 /* bits 15..12 for 0x3c */
14461 JR
= 0x0, /* alias */
14469 /* bits 15..12 for 0x05 */
14473 /* bits 15..12 for 0x0d */
14485 /* bits 15..12 for 0x15 */
14491 /* bits 15..12 for 0x1d */
14495 /* bits 15..12 for 0x2d */
14500 /* bits 15..12 for 0x35 */
14507 /* POOL32B encoding of minor opcode field (bits 15..12) */
14523 /* POOL32C encoding of minor opcode field (bits 15..12) */
14544 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14557 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14570 /* POOL32F encoding of minor opcode field (bits 5..0) */
14573 /* These are the bit 7..6 values */
14582 /* These are the bit 8..6 values */
14607 MOVZ_FMT_05
= 0x05,
14641 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14648 /* POOL32Fxf encoding of minor opcode extension field */
14686 /* POOL32I encoding of minor opcode field (bits 25..21) */
14716 /* These overlap and are distinguished by bit16 of the instruction */
14725 /* POOL16A encoding of minor opcode field */
14732 /* POOL16B encoding of minor opcode field */
14739 /* POOL16C encoding of minor opcode field */
14759 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14783 /* POOL16D encoding of minor opcode field */
14790 /* POOL16E encoding of minor opcode field */
14797 static int mmreg (int r
)
14799 static const int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
14804 /* Used for 16-bit store instructions. */
14805 static int mmreg2 (int r
)
14807 static const int map
[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)
/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32 - width)))                \
               << (32 - width))                                         \
     >> (32 - width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32 - width)))
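/*
 * Worked example (illustrative): with bits 4..1 of the opcode equal to
 * 0b1110, ZIMM(op, 1, 4) yields 14, while SIMM(op, 1, 4) first shifts the
 * extracted field up to the sign bit (0xE0000000) and then arithmetic-
 * shifts it back down, yielding -2.
 */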
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
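/*
 * Illustration of the ADDIUSP decoding above: the 9-bit encoded field is a
 * word count, where 0..1 mean 256..257, 2..255 are taken as-is, 256..509
 * mean -256..-3 and 510..511 mean -258..-257.  For example, encoded == 510
 * produces ADDIU $sp, $sp, -1032 (-258 words).
 */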
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}
static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
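/*
 * Note on the code above: LWM32/SWM32 (and LDM/SDM on MIPS64) operate on a
 * variable-length register list, so the work is done in helpers; the
 * generated code passes the start address, the encoded register list and
 * the memory index rather than emitting one TCG access per register.
 */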
static void gen_pool16c_insn(DisasContext *ctx)
{
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
14931 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14937 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14943 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14949 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14956 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14957 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14959 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14968 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14969 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14971 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14978 int reg
= ctx
->opcode
& 0x1f;
14980 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14986 int reg
= ctx
->opcode
& 0x1f;
14987 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */
14994 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14995 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14999 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15000 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15004 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15008 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15011 generate_exception_end(ctx
, EXCP_BREAK
);
15014 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15015 gen_helper_do_semihosting(cpu_env
);
15017 /* XXX: not clear which exception should be raised
15018 * when in debug mode...
15020 check_insn(ctx
, ISA_MIPS32
);
15021 generate_exception_end(ctx
, EXCP_DBp
);
15024 case JRADDIUSP
+ 0:
15025 case JRADDIUSP
+ 1:
15027 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15028 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15029 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
        /* Let normal delay slot handling in our caller take us
           to the branch target. */
15035 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
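/*
 * Illustration of the MOVEP register encodings above: enc_dest indexes the
 * (rd, re) destination pair, e.g. 0 selects $5/$6 and 3 selects $4/$21,
 * while enc_rs/enc_rt select the sources through rs_rt_enc.  Source
 * encoding 0 names $0, which is why that case is emitted as a move of
 * constant zero.
 */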
static void gen_pool16c_r6_insn(DisasContext *ctx)
{
    int rt = mmreg((ctx->opcode >> 7) & 0x7);
    int rs = mmreg((ctx->opcode >> 4) & 0x7);

    switch (ctx->opcode & 0xf) {
15070 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15073 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15077 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15078 int offset
= extract32(ctx
->opcode
, 4, 4);
15079 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15082 case R6_JRC16
: /* JRCADDIUSP */
15083 if ((ctx
->opcode
>> 4) & 1) {
15085 int imm
= extract32(ctx
->opcode
, 5, 5);
15086 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15087 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15090 rs
= extract32(ctx
->opcode
, 5, 5);
15091 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15103 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15104 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15105 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15106 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15110 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15113 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15117 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15118 int offset
= extract32(ctx
->opcode
, 4, 4);
15119 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15122 case JALRC16
: /* BREAK16, SDBBP16 */
15123 switch (ctx
->opcode
& 0x3f) {
15125 case JALRC16
+ 0x20:
15127 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15132 generate_exception(ctx
, EXCP_BREAK
);
15136 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15137 gen_helper_do_semihosting(cpu_env
);
15139 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15140 generate_exception(ctx
, EXCP_RI
);
15142 generate_exception(ctx
, EXCP_DBp
);
15149 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
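/*
 * LWXS rd, index(base): the effective address computed above is
 * GPR[base] + (GPR[index] << 2), and the sign-extended 32-bit load result
 * is written to rd.  A zero index register skips the scaled add.
 */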
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
    case LWP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd + 1);
        break;
    case SWP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        break;
#ifdef TARGET_MIPS64
    case LDP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd + 1);
        break;
    case SDP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
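/*
 * Example of the SYNC stype mapping above: stype 0x11 (SYNC_ACQUIRE) only
 * needs to order earlier loads against later loads and stores, hence
 * TCG_MO_LD_LD | TCG_MO_LD_ST; any unrecognized stype conservatively falls
 * back to a full barrier.
 */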
static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
15273 mips32_op
= OPC_TEQ
;
15276 mips32_op
= OPC_TGE
;
15279 mips32_op
= OPC_TGEU
;
15282 mips32_op
= OPC_TLT
;
15285 mips32_op
= OPC_TLTU
;
15288 mips32_op
= OPC_TNE
;
15290 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15292 #ifndef CONFIG_USER_ONLY
15295 check_cp0_enabled(ctx
);
15297 /* Treat as NOP. */
15300 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15304 check_cp0_enabled(ctx
);
15306 TCGv t0
= tcg_temp_new();
15308 gen_load_gpr(t0
, rt
);
15309 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15315 switch (minor
& 3) {
15317 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15320 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15323 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15326 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15329 goto pool32axf_invalid
;
15333 switch (minor
& 3) {
15335 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15338 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15341 goto pool32axf_invalid
;
15347 check_insn(ctx
, ISA_MIPS32R6
);
15348 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15351 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15354 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15357 mips32_op
= OPC_CLO
;
15360 mips32_op
= OPC_CLZ
;
15362 check_insn(ctx
, ISA_MIPS32
);
15363 gen_cl(ctx
, mips32_op
, rt
, rs
);
15366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15367 gen_rdhwr(ctx
, rt
, rs
, 0);
15370 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15373 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15374 mips32_op
= OPC_MULT
;
15377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15378 mips32_op
= OPC_MULTU
;
15381 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15382 mips32_op
= OPC_DIV
;
15385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15386 mips32_op
= OPC_DIVU
;
15389 check_insn(ctx
, ISA_MIPS32
);
15390 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15393 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15394 mips32_op
= OPC_MADD
;
15397 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15398 mips32_op
= OPC_MADDU
;
15401 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15402 mips32_op
= OPC_MSUB
;
15405 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15406 mips32_op
= OPC_MSUBU
;
15408 check_insn(ctx
, ISA_MIPS32
);
15409 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15412 goto pool32axf_invalid
;
15423 generate_exception_err(ctx
, EXCP_CpU
, 2);
15426 goto pool32axf_invalid
;
15431 case JALR
: /* JALRC */
15432 case JALR_HB
: /* JALRC_HB */
15433 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15434 /* JALRC, JALRC_HB */
15435 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15437 /* JALR, JALR_HB */
15438 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15439 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15445 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15446 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15449 goto pool32axf_invalid
;
15455 check_cp0_enabled(ctx
);
15456 check_insn(ctx
, ISA_MIPS32R2
);
15457 gen_load_srsgpr(rs
, rt
);
15460 check_cp0_enabled(ctx
);
15461 check_insn(ctx
, ISA_MIPS32R2
);
15462 gen_store_srsgpr(rs
, rt
);
15465 goto pool32axf_invalid
;
15468 #ifndef CONFIG_USER_ONLY
15472 mips32_op
= OPC_TLBP
;
15475 mips32_op
= OPC_TLBR
;
15478 mips32_op
= OPC_TLBWI
;
15481 mips32_op
= OPC_TLBWR
;
15484 mips32_op
= OPC_TLBINV
;
15487 mips32_op
= OPC_TLBINVF
;
15490 mips32_op
= OPC_WAIT
;
15493 mips32_op
= OPC_DERET
;
15496 mips32_op
= OPC_ERET
;
15498 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15501 goto pool32axf_invalid
;
15507 check_cp0_enabled(ctx
);
15509 TCGv t0
= tcg_temp_new();
15511 save_cpu_state(ctx
, 1);
15512 gen_helper_di(t0
, cpu_env
);
15513 gen_store_gpr(t0
, rs
);
15514 /* Stop translation as we may have switched the execution mode */
15515 ctx
->base
.is_jmp
= DISAS_STOP
;
15520 check_cp0_enabled(ctx
);
15522 TCGv t0
= tcg_temp_new();
15524 save_cpu_state(ctx
, 1);
15525 gen_helper_ei(t0
, cpu_env
);
15526 gen_store_gpr(t0
, rs
);
            /* DISAS_STOP isn't sufficient, we need to ensure we break out
               of translated code to check for pending interrupts. */
15529 gen_save_pc(ctx
->base
.pc_next
+ 4);
15530 ctx
->base
.is_jmp
= DISAS_EXIT
;
15535 goto pool32axf_invalid
;
15542 gen_sync(extract32(ctx
->opcode
, 16, 5));
15545 generate_exception_end(ctx
, EXCP_SYSCALL
);
15548 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15549 gen_helper_do_semihosting(cpu_env
);
15551 check_insn(ctx
, ISA_MIPS32
);
15552 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15553 generate_exception_end(ctx
, EXCP_RI
);
15555 generate_exception_end(ctx
, EXCP_DBp
);
15560 goto pool32axf_invalid
;
15564 switch (minor
& 3) {
15566 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15569 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15572 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15575 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15578 goto pool32axf_invalid
;
15582 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15585 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15588 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15591 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15594 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15597 goto pool32axf_invalid
;
15602 MIPS_INVAL("pool32axf");
15603 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */
static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc

    switch (extension) {
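    /*
     * The case labels below are built by packing the format selector above
     * the minor opcode bits, so e.g. FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D)
     * only matches when both the opcode field and the two-bit format field
     * of the extension agree.
     */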
15638 case FLOAT_1BIT_FMT(CFC1
, 0):
15639 mips32_op
= OPC_CFC1
;
15641 case FLOAT_1BIT_FMT(CTC1
, 0):
15642 mips32_op
= OPC_CTC1
;
15644 case FLOAT_1BIT_FMT(MFC1
, 0):
15645 mips32_op
= OPC_MFC1
;
15647 case FLOAT_1BIT_FMT(MTC1
, 0):
15648 mips32_op
= OPC_MTC1
;
15650 case FLOAT_1BIT_FMT(MFHC1
, 0):
15651 mips32_op
= OPC_MFHC1
;
15653 case FLOAT_1BIT_FMT(MTHC1
, 0):
15654 mips32_op
= OPC_MTHC1
;
15656 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15659 /* Reciprocal square root */
15660 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15661 mips32_op
= OPC_RSQRT_S
;
15663 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15664 mips32_op
= OPC_RSQRT_D
;
15668 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15669 mips32_op
= OPC_SQRT_S
;
15671 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15672 mips32_op
= OPC_SQRT_D
;
15676 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15677 mips32_op
= OPC_RECIP_S
;
15679 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15680 mips32_op
= OPC_RECIP_D
;
15684 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15685 mips32_op
= OPC_FLOOR_L_S
;
15687 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15688 mips32_op
= OPC_FLOOR_L_D
;
15690 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15691 mips32_op
= OPC_FLOOR_W_S
;
15693 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15694 mips32_op
= OPC_FLOOR_W_D
;
15698 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15699 mips32_op
= OPC_CEIL_L_S
;
15701 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15702 mips32_op
= OPC_CEIL_L_D
;
15704 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15705 mips32_op
= OPC_CEIL_W_S
;
15707 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15708 mips32_op
= OPC_CEIL_W_D
;
15712 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15713 mips32_op
= OPC_TRUNC_L_S
;
15715 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15716 mips32_op
= OPC_TRUNC_L_D
;
15718 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15719 mips32_op
= OPC_TRUNC_W_S
;
15721 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15722 mips32_op
= OPC_TRUNC_W_D
;
15726 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15727 mips32_op
= OPC_ROUND_L_S
;
15729 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15730 mips32_op
= OPC_ROUND_L_D
;
15732 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15733 mips32_op
= OPC_ROUND_W_S
;
15735 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15736 mips32_op
= OPC_ROUND_W_D
;
15739 /* Integer to floating-point conversion */
15740 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15741 mips32_op
= OPC_CVT_L_S
;
15743 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15744 mips32_op
= OPC_CVT_L_D
;
15746 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15747 mips32_op
= OPC_CVT_W_S
;
15749 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15750 mips32_op
= OPC_CVT_W_D
;
15753 /* Paired-foo conversions */
15754 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15755 mips32_op
= OPC_CVT_S_PL
;
15757 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15758 mips32_op
= OPC_CVT_S_PU
;
15760 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15761 mips32_op
= OPC_CVT_PW_PS
;
15763 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15764 mips32_op
= OPC_CVT_PS_PW
;
15767 /* Floating-point moves */
15768 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15769 mips32_op
= OPC_MOV_S
;
15771 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15772 mips32_op
= OPC_MOV_D
;
15774 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15775 mips32_op
= OPC_MOV_PS
;
15778 /* Absolute value */
15779 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15780 mips32_op
= OPC_ABS_S
;
15782 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15783 mips32_op
= OPC_ABS_D
;
15785 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15786 mips32_op
= OPC_ABS_PS
;
15790 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15791 mips32_op
= OPC_NEG_S
;
15793 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15794 mips32_op
= OPC_NEG_D
;
15796 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15797 mips32_op
= OPC_NEG_PS
;
15800 /* Reciprocal square root step */
15801 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15802 mips32_op
= OPC_RSQRT1_S
;
15804 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15805 mips32_op
= OPC_RSQRT1_D
;
15807 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15808 mips32_op
= OPC_RSQRT1_PS
;
15811 /* Reciprocal step */
15812 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15813 mips32_op
= OPC_RECIP1_S
;
15815 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15816 mips32_op
= OPC_RECIP1_S
;
15818 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15819 mips32_op
= OPC_RECIP1_PS
;
15822 /* Conversions from double */
15823 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15824 mips32_op
= OPC_CVT_D_S
;
15826 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15827 mips32_op
= OPC_CVT_D_W
;
15829 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15830 mips32_op
= OPC_CVT_D_L
;
15833 /* Conversions from single */
15834 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15835 mips32_op
= OPC_CVT_S_D
;
15837 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15838 mips32_op
= OPC_CVT_S_W
;
15840 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15841 mips32_op
= OPC_CVT_S_L
;
15843 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15846 /* Conditional moves on floating-point codes */
15847 case COND_FLOAT_MOV(MOVT
, 0):
15848 case COND_FLOAT_MOV(MOVT
, 1):
15849 case COND_FLOAT_MOV(MOVT
, 2):
15850 case COND_FLOAT_MOV(MOVT
, 3):
15851 case COND_FLOAT_MOV(MOVT
, 4):
15852 case COND_FLOAT_MOV(MOVT
, 5):
15853 case COND_FLOAT_MOV(MOVT
, 6):
15854 case COND_FLOAT_MOV(MOVT
, 7):
15855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15856 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15858 case COND_FLOAT_MOV(MOVF
, 0):
15859 case COND_FLOAT_MOV(MOVF
, 1):
15860 case COND_FLOAT_MOV(MOVF
, 2):
15861 case COND_FLOAT_MOV(MOVF
, 3):
15862 case COND_FLOAT_MOV(MOVF
, 4):
15863 case COND_FLOAT_MOV(MOVF
, 5):
15864 case COND_FLOAT_MOV(MOVF
, 6):
15865 case COND_FLOAT_MOV(MOVF
, 7):
15866 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15867 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15870 MIPS_INVAL("pool32fxf");
15871 generate_exception_end(ctx
, EXCP_RI
);
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, minor2, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
15897 minor
= ctx
->opcode
& 0x3f;
15900 minor
= (ctx
->opcode
>> 6) & 0xf;
15903 mips32_op
= OPC_SLL
;
15906 mips32_op
= OPC_SRA
;
15909 mips32_op
= OPC_SRL
;
15912 mips32_op
= OPC_ROTR
;
15914 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15917 check_insn(ctx
, ISA_MIPS32R6
);
15918 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15921 check_insn(ctx
, ISA_MIPS32R6
);
15922 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15925 check_insn(ctx
, ISA_MIPS32R6
);
15926 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15929 goto pool32a_invalid
;
15933 minor
= (ctx
->opcode
>> 6) & 0xf;
15937 mips32_op
= OPC_ADD
;
15940 mips32_op
= OPC_ADDU
;
15943 mips32_op
= OPC_SUB
;
15946 mips32_op
= OPC_SUBU
;
15949 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15950 mips32_op
= OPC_MUL
;
15952 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15956 mips32_op
= OPC_SLLV
;
15959 mips32_op
= OPC_SRLV
;
15962 mips32_op
= OPC_SRAV
;
15965 mips32_op
= OPC_ROTRV
;
15967 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15969 /* Logical operations */
15971 mips32_op
= OPC_AND
;
15974 mips32_op
= OPC_OR
;
15977 mips32_op
= OPC_NOR
;
15980 mips32_op
= OPC_XOR
;
15982 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15984 /* Set less than */
15986 mips32_op
= OPC_SLT
;
15989 mips32_op
= OPC_SLTU
;
15991 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15994 goto pool32a_invalid
;
15998 minor
= (ctx
->opcode
>> 6) & 0xf;
16000 /* Conditional moves */
16001 case MOVN
: /* MUL */
16002 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16004 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16007 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16010 case MOVZ
: /* MUH */
16011 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16013 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16016 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16020 check_insn(ctx
, ISA_MIPS32R6
);
16021 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16024 check_insn(ctx
, ISA_MIPS32R6
);
16025 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16027 case LWXS
: /* DIV */
16028 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16030 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16033 gen_ldxs(ctx
, rs
, rt
, rd
);
16037 check_insn(ctx
, ISA_MIPS32R6
);
16038 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16041 check_insn(ctx
, ISA_MIPS32R6
);
16042 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16045 check_insn(ctx
, ISA_MIPS32R6
);
16046 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16049 goto pool32a_invalid
;
16053 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16056 check_insn(ctx
, ISA_MIPS32R6
);
16057 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16058 extract32(ctx
->opcode
, 9, 2));
16061 check_insn(ctx
, ISA_MIPS32R6
);
16062 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16065 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16068 gen_pool32axf(env
, ctx
, rt
, rs
);
16071 generate_exception_end(ctx
, EXCP_BREAK
);
16074 check_insn(ctx
, ISA_MIPS32R6
);
16075 generate_exception_end(ctx
, EXCP_RI
);
16079 MIPS_INVAL("pool32a");
16080 generate_exception_end(ctx
, EXCP_RI
);
16085 minor
= (ctx
->opcode
>> 12) & 0xf;
16088 check_cp0_enabled(ctx
);
16089 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16090 gen_cache_operation(ctx
, rt
, rs
, imm
);
16095 /* COP2: Not implemented. */
16096 generate_exception_err(ctx
, EXCP_CpU
, 2);
16098 #ifdef TARGET_MIPS64
16101 check_insn(ctx
, ISA_MIPS3
);
16102 check_mips_64(ctx
);
16107 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16109 #ifdef TARGET_MIPS64
16112 check_insn(ctx
, ISA_MIPS3
);
16113 check_mips_64(ctx
);
16118 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16121 MIPS_INVAL("pool32b");
16122 generate_exception_end(ctx
, EXCP_RI
);
16127 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16128 minor
= ctx
->opcode
& 0x3f;
16129 check_cp1_enabled(ctx
);
16132 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16133 mips32_op
= OPC_ALNV_PS
;
16136 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16137 mips32_op
= OPC_MADD_S
;
16140 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16141 mips32_op
= OPC_MADD_D
;
16144 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16145 mips32_op
= OPC_MADD_PS
;
16148 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16149 mips32_op
= OPC_MSUB_S
;
16152 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16153 mips32_op
= OPC_MSUB_D
;
16156 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16157 mips32_op
= OPC_MSUB_PS
;
16160 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16161 mips32_op
= OPC_NMADD_S
;
16164 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16165 mips32_op
= OPC_NMADD_D
;
16168 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16169 mips32_op
= OPC_NMADD_PS
;
16172 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16173 mips32_op
= OPC_NMSUB_S
;
16176 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16177 mips32_op
= OPC_NMSUB_D
;
16180 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16181 mips32_op
= OPC_NMSUB_PS
;
16183 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16185 case CABS_COND_FMT
:
16186 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16187 cond
= (ctx
->opcode
>> 6) & 0xf;
16188 cc
= (ctx
->opcode
>> 13) & 0x7;
16189 fmt
= (ctx
->opcode
>> 10) & 0x3;
16192 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16195 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16198 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16201 goto pool32f_invalid
;
16205 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16206 cond
= (ctx
->opcode
>> 6) & 0xf;
16207 cc
= (ctx
->opcode
>> 13) & 0x7;
16208 fmt
= (ctx
->opcode
>> 10) & 0x3;
16211 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16214 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16217 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16220 goto pool32f_invalid
;
16224 check_insn(ctx
, ISA_MIPS32R6
);
16225 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16228 check_insn(ctx
, ISA_MIPS32R6
);
16229 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16232 gen_pool32fxf(ctx
, rt
, rs
);
16236 switch ((ctx
->opcode
>> 6) & 0x7) {
16238 mips32_op
= OPC_PLL_PS
;
16241 mips32_op
= OPC_PLU_PS
;
16244 mips32_op
= OPC_PUL_PS
;
16247 mips32_op
= OPC_PUU_PS
;
16250 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16251 mips32_op
= OPC_CVT_PS_S
;
16253 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16256 goto pool32f_invalid
;
16260 check_insn(ctx
, ISA_MIPS32R6
);
16261 switch ((ctx
->opcode
>> 9) & 0x3) {
16263 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16266 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16269 goto pool32f_invalid
;
16274 switch ((ctx
->opcode
>> 6) & 0x7) {
16276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16277 mips32_op
= OPC_LWXC1
;
16280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16281 mips32_op
= OPC_SWXC1
;
16284 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16285 mips32_op
= OPC_LDXC1
;
16288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16289 mips32_op
= OPC_SDXC1
;
16292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16293 mips32_op
= OPC_LUXC1
;
16296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16297 mips32_op
= OPC_SUXC1
;
16299 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16302 goto pool32f_invalid
;
16306 check_insn(ctx
, ISA_MIPS32R6
);
16307 switch ((ctx
->opcode
>> 9) & 0x3) {
16309 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16312 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16315 goto pool32f_invalid
;
16320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16321 fmt
= (ctx
->opcode
>> 9) & 0x3;
16322 switch ((ctx
->opcode
>> 6) & 0x7) {
16326 mips32_op
= OPC_RSQRT2_S
;
16329 mips32_op
= OPC_RSQRT2_D
;
16332 mips32_op
= OPC_RSQRT2_PS
;
16335 goto pool32f_invalid
;
16341 mips32_op
= OPC_RECIP2_S
;
16344 mips32_op
= OPC_RECIP2_D
;
16347 mips32_op
= OPC_RECIP2_PS
;
16350 goto pool32f_invalid
;
16354 mips32_op
= OPC_ADDR_PS
;
16357 mips32_op
= OPC_MULR_PS
;
16359 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16362 goto pool32f_invalid
;
16366 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16367 cc
= (ctx
->opcode
>> 13) & 0x7;
16368 fmt
= (ctx
->opcode
>> 9) & 0x3;
16369 switch ((ctx
->opcode
>> 6) & 0x7) {
16370 case MOVF_FMT
: /* RINT_FMT */
16371 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16375 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16378 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16381 goto pool32f_invalid
;
16387 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16390 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16394 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16397 goto pool32f_invalid
;
16401 case MOVT_FMT
: /* CLASS_FMT */
16402 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16406 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16409 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16412 goto pool32f_invalid
;
16418 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16421 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16425 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16428 goto pool32f_invalid
;
16433 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16436 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
16455 check_insn(ctx
, ISA_MIPS32R6
);
16456 switch ((ctx
->opcode
>> 9) & 0x3) {
16458 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16461 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16464 goto pool32f_invalid
;
16468 check_insn(ctx
, ISA_MIPS32R6
);
16469 switch ((ctx
->opcode
>> 9) & 0x3) {
16471 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16474 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16477 goto pool32f_invalid
;
16481 /* regular FP ops */
16482 switch ((ctx
->opcode
>> 6) & 0x3) {
16484 FINSN_3ARG_SDPS(ADD
);
16487 FINSN_3ARG_SDPS(SUB
);
16490 FINSN_3ARG_SDPS(MUL
);
16493 fmt
= (ctx
->opcode
>> 8) & 0x3;
16495 mips32_op
= OPC_DIV_D
;
16496 } else if (fmt
== 0) {
16497 mips32_op
= OPC_DIV_S
;
16499 goto pool32f_invalid
;
16503 goto pool32f_invalid
;
16508 switch ((ctx
->opcode
>> 6) & 0x7) {
16509 case MOVN_FMT
: /* SELEQZ_FMT */
16510 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16512 switch ((ctx
->opcode
>> 9) & 0x3) {
16514 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16517 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16520 goto pool32f_invalid
;
16524 FINSN_3ARG_SDPS(MOVN
);
16528 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16529 FINSN_3ARG_SDPS(MOVN
);
16531 case MOVZ_FMT
: /* SELNEZ_FMT */
16532 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16534 switch ((ctx
->opcode
>> 9) & 0x3) {
16536 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16539 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16542 goto pool32f_invalid
;
16546 FINSN_3ARG_SDPS(MOVZ
);
16550 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16551 FINSN_3ARG_SDPS(MOVZ
);
16554 check_insn(ctx
, ISA_MIPS32R6
);
16555 switch ((ctx
->opcode
>> 9) & 0x3) {
16557 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16560 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16563 goto pool32f_invalid
;
16567 check_insn(ctx
, ISA_MIPS32R6
);
16568 switch ((ctx
->opcode
>> 9) & 0x3) {
16570 mips32_op
= OPC_MADDF_S
;
16573 mips32_op
= OPC_MADDF_D
;
16576 goto pool32f_invalid
;
16580 check_insn(ctx
, ISA_MIPS32R6
);
16581 switch ((ctx
->opcode
>> 9) & 0x3) {
16583 mips32_op
= OPC_MSUBF_S
;
16586 mips32_op
= OPC_MSUBF_D
;
16589 goto pool32f_invalid
;
16593 goto pool32f_invalid
;
16597 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16601 MIPS_INVAL("pool32f");
16602 generate_exception_end(ctx
, EXCP_RI
);
16606 generate_exception_err(ctx
, EXCP_CpU
, 1);
16610 minor
= (ctx
->opcode
>> 21) & 0x1f;
16613 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16614 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16617 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16618 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16619 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16623 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16624 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16627 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16628 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16631 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16632 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16633 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16636 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16637 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16638 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16641 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16642 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16645 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16646 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16650 case TLTI
: /* BC1EQZC */
16651 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16653 check_cp1_enabled(ctx
);
16654 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16657 mips32_op
= OPC_TLTI
;
16661 case TGEI
: /* BC1NEZC */
16662 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16664 check_cp1_enabled(ctx
);
16665 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16668 mips32_op
= OPC_TGEI
;
16673 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16674 mips32_op
= OPC_TLTIU
;
16677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16678 mips32_op
= OPC_TGEIU
;
16680 case TNEI
: /* SYNCI */
16681 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16683 /* Break the TB to be able to sync copied instructions
16685 ctx
->base
.is_jmp
= DISAS_STOP
;
16688 mips32_op
= OPC_TNEI
;
16693 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16694 mips32_op
= OPC_TEQI
;
16696 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16701 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16702 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16703 4, rs
, 0, imm
<< 1, 0);
16704 /* Compact branches don't have a delay slot, so just let
16705 the normal delay slot handling take us to the branch
16709 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16710 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16713 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16714 /* Break the TB to be able to sync copied instructions
16716 ctx
->base
.is_jmp
= DISAS_STOP
;
16720 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16721 /* COP2: Not implemented. */
16722 generate_exception_err(ctx
, EXCP_CpU
, 2);
16725 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16726 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16729 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16730 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16733 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16734 mips32_op
= OPC_BC1FANY4
;
16737 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16738 mips32_op
= OPC_BC1TANY4
;
16741 check_insn(ctx
, ASE_MIPS3D
);
16744 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16745 check_cp1_enabled(ctx
);
16746 gen_compute_branch1(ctx
, mips32_op
,
16747 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16749 generate_exception_err(ctx
, EXCP_CpU
, 1);
16754 /* MIPS DSP: not implemented */
16757 MIPS_INVAL("pool32i");
16758 generate_exception_end(ctx
, EXCP_RI
);
16763 minor
= (ctx
->opcode
>> 12) & 0xf;
16764 offset
= sextract32(ctx
->opcode
, 0,
16765 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16768 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16769 mips32_op
= OPC_LWL
;
16772 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16773 mips32_op
= OPC_SWL
;
16776 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16777 mips32_op
= OPC_LWR
;
16780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16781 mips32_op
= OPC_SWR
;
16783 #if defined(TARGET_MIPS64)
16785 check_insn(ctx
, ISA_MIPS3
);
16786 check_mips_64(ctx
);
16787 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16788 mips32_op
= OPC_LDL
;
16791 check_insn(ctx
, ISA_MIPS3
);
16792 check_mips_64(ctx
);
16793 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16794 mips32_op
= OPC_SDL
;
16797 check_insn(ctx
, ISA_MIPS3
);
16798 check_mips_64(ctx
);
16799 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16800 mips32_op
= OPC_LDR
;
16803 check_insn(ctx
, ISA_MIPS3
);
16804 check_mips_64(ctx
);
16805 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16806 mips32_op
= OPC_SDR
;
16809 check_insn(ctx
, ISA_MIPS3
);
16810 check_mips_64(ctx
);
16811 mips32_op
= OPC_LWU
;
16814 check_insn(ctx
, ISA_MIPS3
);
16815 check_mips_64(ctx
);
16816 mips32_op
= OPC_LLD
;
16820 mips32_op
= OPC_LL
;
16823 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16826 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16829 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, false);
16831 #if defined(TARGET_MIPS64)
16833 check_insn(ctx
, ISA_MIPS3
);
16834 check_mips_64(ctx
);
16835 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TEQ
, false);
16840 MIPS_INVAL("pool32c ld-eva");
16841 generate_exception_end(ctx
, EXCP_RI
);
16844 check_cp0_enabled(ctx
);
16846 minor2
= (ctx
->opcode
>> 9) & 0x7;
16847 offset
= sextract32(ctx
->opcode
, 0, 9);
16850 mips32_op
= OPC_LBUE
;
16853 mips32_op
= OPC_LHUE
;
16856 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16857 mips32_op
= OPC_LWLE
;
16860 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16861 mips32_op
= OPC_LWRE
;
16864 mips32_op
= OPC_LBE
;
16867 mips32_op
= OPC_LHE
;
16870 mips32_op
= OPC_LLE
;
16873 mips32_op
= OPC_LWE
;
16879 MIPS_INVAL("pool32c st-eva");
16880 generate_exception_end(ctx
, EXCP_RI
);
16883 check_cp0_enabled(ctx
);
16885 minor2
= (ctx
->opcode
>> 9) & 0x7;
16886 offset
= sextract32(ctx
->opcode
, 0, 9);
16889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16890 mips32_op
= OPC_SWLE
;
16893 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16894 mips32_op
= OPC_SWRE
;
16897 /* Treat as no-op */
16898 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16899 /* hint codes 24-31 are reserved and signal RI */
16900 generate_exception(ctx
, EXCP_RI
);
16904 /* Treat as no-op */
16905 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16906 gen_cache_operation(ctx
, rt
, rs
, offset
);
16910 mips32_op
= OPC_SBE
;
16913 mips32_op
= OPC_SHE
;
16916 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, true);
16919 mips32_op
= OPC_SWE
;
16924 /* Treat as no-op */
16925 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16926 /* hint codes 24-31 are reserved and signal RI */
16927 generate_exception(ctx
, EXCP_RI
);
16931 MIPS_INVAL("pool32c");
16932 generate_exception_end(ctx
, EXCP_RI
);
16936 case ADDI32
: /* AUI, LUI */
16937 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16939 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16942 mips32_op
= OPC_ADDI
;
16947 mips32_op
= OPC_ADDIU
;
16949 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16952 /* Logical operations */
16954 mips32_op
= OPC_ORI
;
16957 mips32_op
= OPC_XORI
;
16960 mips32_op
= OPC_ANDI
;
16962 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16965 /* Set less than immediate */
16967 mips32_op
= OPC_SLTI
;
16970 mips32_op
= OPC_SLTIU
;
16972 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16976 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16977 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16978 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16980 case JALS32
: /* BOVC, BEQC, BEQZALC */
16981 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16984 mips32_op
= OPC_BOVC
;
16985 } else if (rs
< rt
&& rs
== 0) {
16987 mips32_op
= OPC_BEQZALC
;
16990 mips32_op
= OPC_BEQC
;
16992 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16995 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16996 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16997 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17000 case BEQ32
: /* BC */
17001 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17003 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17004 sextract32(ctx
->opcode
<< 1, 0, 27));
17007 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17010 case BNE32
: /* BALC */
17011 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17013 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17014 sextract32(ctx
->opcode
<< 1, 0, 27));
17017 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17020 case J32
: /* BGTZC, BLTZC, BLTC */
17021 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17022 if (rs
== 0 && rt
!= 0) {
17024 mips32_op
= OPC_BGTZC
;
17025 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17027 mips32_op
= OPC_BLTZC
;
17030 mips32_op
= OPC_BLTC
;
17032 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17035 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17036 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17039 case JAL32
: /* BLEZC, BGEZC, BGEC */
17040 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17041 if (rs
== 0 && rt
!= 0) {
17043 mips32_op
= OPC_BLEZC
;
17044 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17046 mips32_op
= OPC_BGEZC
;
17049 mips32_op
= OPC_BGEC
;
17051 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17054 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17055 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17056 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17059 /* Floating point (COP1) */
17061 mips32_op
= OPC_LWC1
;
17064 mips32_op
= OPC_LDC1
;
17067 mips32_op
= OPC_SWC1
;
17070 mips32_op
= OPC_SDC1
;
17072 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17074 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17075 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17076 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17077 switch ((ctx
->opcode
>> 16) & 0x1f) {
17086 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17089 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17092 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17102 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17105 generate_exception(ctx
, EXCP_RI
);
17110 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17111 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17113 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17116 case BNVC
: /* BNEC, BNEZALC */
17117 check_insn(ctx
, ISA_MIPS32R6
);
17120 mips32_op
= OPC_BNVC
;
17121 } else if (rs
< rt
&& rs
== 0) {
17123 mips32_op
= OPC_BNEZALC
;
17126 mips32_op
= OPC_BNEC
;
17128 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17130 case R6_BNEZC
: /* JIALC */
17131 check_insn(ctx
, ISA_MIPS32R6
);
17134 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17135 sextract32(ctx
->opcode
<< 1, 0, 22));
17138 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17141 case R6_BEQZC
: /* JIC */
17142 check_insn(ctx
, ISA_MIPS32R6
);
17145 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17146 sextract32(ctx
->opcode
<< 1, 0, 22));
17149 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17152 case BLEZALC
: /* BGEZALC, BGEUC */
17153 check_insn(ctx
, ISA_MIPS32R6
);
17154 if (rs
== 0 && rt
!= 0) {
17156 mips32_op
= OPC_BLEZALC
;
17157 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17159 mips32_op
= OPC_BGEZALC
;
17162 mips32_op
= OPC_BGEUC
;
17164 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17166 case BGTZALC
: /* BLTZALC, BLTUC */
17167 check_insn(ctx
, ISA_MIPS32R6
);
17168 if (rs
== 0 && rt
!= 0) {
17170 mips32_op
= OPC_BGTZALC
;
17171 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17173 mips32_op
= OPC_BLTZALC
;
17176 mips32_op
= OPC_BLTUC
;
17178 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17180 /* Loads and stores */
17182 mips32_op
= OPC_LB
;
17185 mips32_op
= OPC_LBU
;
17188 mips32_op
= OPC_LH
;
17191 mips32_op
= OPC_LHU
;
17194 mips32_op
= OPC_LW
;
17196 #ifdef TARGET_MIPS64
17198 check_insn(ctx
, ISA_MIPS3
);
17199 check_mips_64(ctx
);
17200 mips32_op
= OPC_LD
;
17203 check_insn(ctx
, ISA_MIPS3
);
17204 check_mips_64(ctx
);
17205 mips32_op
= OPC_SD
;
17209 mips32_op
= OPC_SB
;
17212 mips32_op
= OPC_SH
;
17215 mips32_op
= OPC_SW
;
17218 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17221 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17224 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        env->CP0_BadVAddr = ctx->base.pc_next;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        case 0:
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        case 4:
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        case 5:
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        case 6:
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        case 7:
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        case 1:
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        case 2:
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        case 3:
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        }
    }
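    /*
     * In other words: when the preceding branch demanded a specific
     * delay-slot size (MIPS_HFLAG_BDS_STRICT), a 32-bit major opcode in a
     * slot that must be 16 bits wide (MIPS_HFLAG_BDS16), or a 16-bit opcode
     * in a slot that must be 32 bits wide (MIPS_HFLAG_BDS32), is rejected
     * with a Reserved Instruction exception.
     */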
17276 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17277 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17278 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17281 switch (ctx
->opcode
& 0x1) {
17289 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17290 /* In the Release 6 the register number location in
17291 * the instruction encoding has changed.
17293 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17295 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17301 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17302 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17303 int amount
= (ctx
->opcode
>> 1) & 0x7;
17305 amount
= amount
== 0 ? 8 : amount
;
17307 switch (ctx
->opcode
& 0x1) {
17316 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17320 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17321 gen_pool16c_r6_insn(ctx
);
17323 gen_pool16c_insn(ctx
);
17328 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17329 int rb
= 28; /* GP */
17330 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17332 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17337 if (ctx
->opcode
& 1) {
17338 generate_exception_end(ctx
, EXCP_RI
);
17341 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17342 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17343 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17344 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17349 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17350 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17351 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17352 offset
= (offset
== 0xf ? -1 : offset
);
17354 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17359 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17360 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17361 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17363 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17368 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17369 int rb
= 29; /* SP */
17370 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17372 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17377 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17378 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17379 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17381 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17386 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17387 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17388 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17390 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17395 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17396 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17397 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17399 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17404 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17405 int rb
= 29; /* SP */
17406 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17408 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17413 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17414 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17415 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17417 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17422 int rd
= uMIPS_RD5(ctx
->opcode
);
17423 int rs
= uMIPS_RS5(ctx
->opcode
);
17425 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17432 switch (ctx
->opcode
& 0x1) {
17442 switch (ctx
->opcode
& 0x1) {
17447 gen_addiur1sp(ctx
);
17451 case B16
: /* BC16 */
17452 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17453 sextract32(ctx
->opcode
, 0, 10) << 1,
17454 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17456 case BNEZ16
: /* BNEZC16 */
17457 case BEQZ16
: /* BEQZC16 */
17458 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17459 mmreg(uMIPS_RD(ctx
->opcode
)),
17460 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17461 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17466 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17467 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17469 imm
= (imm
== 0x7f ? -1 : imm
);
17470 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17476 generate_exception_end(ctx
, EXCP_RI
);
17479 decode_micromips32_opc(env
, ctx
);
17492 /* MAJOR, P16, and P32 pools opcodes */
17496 NM_MOVE_BALC
= 0x02,
17504 NM_P16_SHIFT
= 0x0c,
17522 NM_P_LS_U12
= 0x21,
17532 NM_P16_ADDU
= 0x2c,
17546 NM_MOVEPREV
= 0x3f,
17549 /* POOL32A instruction pool */
17551 NM_POOL32A0
= 0x00,
17552 NM_SPECIAL2
= 0x01,
17555 NM_POOL32A5
= 0x05,
17556 NM_POOL32A7
= 0x07,
17559 /* P.GP.W instruction pool */
17561 NM_ADDIUGP_W
= 0x00,
17566 /* P48I instruction pool */
17570 NM_ADDIUGP48
= 0x02,
17571 NM_ADDIUPC48
= 0x03,
17576 /* P.U12 instruction pool */
17585 NM_ADDIUNEG
= 0x08,
17592 /* POOL32F instruction pool */
17594 NM_POOL32F_0
= 0x00,
17595 NM_POOL32F_3
= 0x03,
17596 NM_POOL32F_5
= 0x05,
17599 /* POOL32S instruction pool */
17601 NM_POOL32S_0
= 0x00,
17602 NM_POOL32S_4
= 0x04,
17605 /* P.LUI instruction pool */
17611 /* P.GP.BH instruction pool */
17616 NM_ADDIUGP_B
= 0x03,
17619 NM_P_GP_CP1
= 0x06,
17622 /* P.LS.U12 instruction pool */
17627 NM_P_PREFU12
= 0x03,
17640 /* P.LS.S9 instruction pool */
17646 NM_P_LS_UAWM
= 0x05,
17649 /* P.BAL instruction pool */
17655 /* P.J instruction pool */
17658 NM_JALRC_HB
= 0x01,
17659 NM_P_BALRSC
= 0x08,
17662 /* P.BR1 instruction pool */
17670 /* P.BR2 instruction pool */
17677 /* P.BRI instruction pool */
17689 /* P16.SHIFT instruction pool */
17695 /* POOL16C instruction pool */
17697 NM_POOL16C_0
= 0x00,
17701 /* P16.A1 instruction pool */
17703 NM_ADDIUR1SP
= 0x01,
17706 /* P16.A2 instruction pool */
17709 NM_P_ADDIURS5
= 0x01,
17712 /* P16.ADDU instruction pool */
17718 /* P16.SR instruction pool */
17721 NM_RESTORE_JRC16
= 0x01,
17724 /* P16.4X4 instruction pool */
17730 /* P16.LB instruction pool */
17737 /* P16.LH instruction pool */
17744 /* P.RI instruction pool */
17747 NM_P_SYSCALL
= 0x01,
17752 /* POOL32A0 instruction pool */
17787 NM_D_E_MT_VPE
= 0x56,
17795 /* CRC32 instruction pool */
17805 /* POOL32A5 instruction pool */
17807 NM_CMP_EQ_PH
= 0x00,
17808 NM_CMP_LT_PH
= 0x08,
17809 NM_CMP_LE_PH
= 0x10,
17810 NM_CMPGU_EQ_QB
= 0x18,
17811 NM_CMPGU_LT_QB
= 0x20,
17812 NM_CMPGU_LE_QB
= 0x28,
17813 NM_CMPGDU_EQ_QB
= 0x30,
17814 NM_CMPGDU_LT_QB
= 0x38,
17815 NM_CMPGDU_LE_QB
= 0x40,
17816 NM_CMPU_EQ_QB
= 0x48,
17817 NM_CMPU_LT_QB
= 0x50,
17818 NM_CMPU_LE_QB
= 0x58,
17819 NM_ADDQ_S_W
= 0x60,
17820 NM_SUBQ_S_W
= 0x68,
17824 NM_ADDQ_S_PH
= 0x01,
17825 NM_ADDQH_R_PH
= 0x09,
17826 NM_ADDQH_R_W
= 0x11,
17827 NM_ADDU_S_QB
= 0x19,
17828 NM_ADDU_S_PH
= 0x21,
17829 NM_ADDUH_R_QB
= 0x29,
17830 NM_SHRAV_R_PH
= 0x31,
17831 NM_SHRAV_R_QB
= 0x39,
17832 NM_SUBQ_S_PH
= 0x41,
17833 NM_SUBQH_R_PH
= 0x49,
17834 NM_SUBQH_R_W
= 0x51,
17835 NM_SUBU_S_QB
= 0x59,
17836 NM_SUBU_S_PH
= 0x61,
17837 NM_SUBUH_R_QB
= 0x69,
17838 NM_SHLLV_S_PH
= 0x71,
17839 NM_PRECR_SRA_R_PH_W
= 0x79,
17841 NM_MULEU_S_PH_QBL
= 0x12,
17842 NM_MULEU_S_PH_QBR
= 0x1a,
17843 NM_MULQ_RS_PH
= 0x22,
17844 NM_MULQ_S_PH
= 0x2a,
17845 NM_MULQ_RS_W
= 0x32,
17846 NM_MULQ_S_W
= 0x3a,
17849 NM_SHRAV_R_W
= 0x5a,
17850 NM_SHRLV_PH
= 0x62,
17851 NM_SHRLV_QB
= 0x6a,
17852 NM_SHLLV_QB
= 0x72,
17853 NM_SHLLV_S_W
= 0x7a,
17857 NM_MULEQ_S_W_PHL
= 0x04,
17858 NM_MULEQ_S_W_PHR
= 0x0c,
    NM_MUL_S_PH        = 0x05,
    NM_PRECR_QB_PH     = 0x0d,
    NM_PRECRQ_QB_PH    = 0x15,
    NM_PRECRQ_PH_W     = 0x1d,
    NM_PRECRQ_RS_PH_W  = 0x25,
    NM_PRECRQU_S_QB_PH = 0x2d,
    NM_PACKRL_PH       = 0x35,
    NM_SHRA_R_W        = 0x5e,
    NM_SHRA_R_PH       = 0x66,
    NM_SHLL_S_PH       = 0x76,
    NM_SHLL_S_W        = 0x7e,

/* POOL32A7 instruction pool */
    NM_POOL32AXF       = 0x07,

/* P.SR instruction pool */

/* P.SHIFT instruction pool */

/* P.ROTX instruction pool */

/* P.INS instruction pool */

/* P.EXT instruction pool */

/* POOL32F_0 (fmt) instruction pool */
    NM_SELEQZ_S        = 0x07,
    NM_SELEQZ_D        = 0x47,
    NM_SELNEZ_S        = 0x0f,
    NM_SELNEZ_D        = 0x4f,

/* POOL32F_3 instruction pool */
    NM_MINA_FMT        = 0x04,
    NM_MAXA_FMT        = 0x05,
    NM_POOL32FXF       = 0x07,

/* POOL32F_5 instruction pool */
    NM_CMP_CONDN_S     = 0x00,
    NM_CMP_CONDN_D     = 0x02,

/* P.GP.LH instruction pool */

/* P.GP.SH instruction pool */

/* P.GP.CP1 instruction pool */

/* P.LS.S0 instruction pool */
    NM_P_PREFS9        = 0x03,

/* P.LS.S1 instruction pool */
    NM_ASET_ACLR       = 0x02,

/* P.LS.E0 instruction pool */

/* P.PREFE instruction pool */

/* P.LLE instruction pool */

/* P.SCE instruction pool */

/* P.LS.WM instruction pool */

/* P.LS.UAWM instruction pool */

/* P.BR3A instruction pool */
    NM_BPOSGE32C       = 0x04,

/* P16.RI instruction pool */
    NM_P16_SYSCALL     = 0x01,

/* POOL16C_0 instruction pool */
    NM_POOL16C_00      = 0x00,

/* P16.JRC instruction pool */

/* P.SYSCALL instruction pool */

/* P.TRAP instruction pool */

/* P.CMOVE instruction pool */

/* POOL32Axf instruction pool */
    NM_POOL32AXF_1     = 0x01,
    NM_POOL32AXF_2     = 0x02,
    NM_POOL32AXF_4     = 0x04,
    NM_POOL32AXF_5     = 0x05,
    NM_POOL32AXF_7     = 0x07,

/* POOL32Axf_1 instruction pool */
    NM_POOL32AXF_1_0   = 0x00,
    NM_POOL32AXF_1_1   = 0x01,
    NM_POOL32AXF_1_3   = 0x03,
    NM_POOL32AXF_1_4   = 0x04,
    NM_POOL32AXF_1_5   = 0x05,
    NM_POOL32AXF_1_7   = 0x07,

/* POOL32Axf_2 instruction pool */
    NM_POOL32AXF_2_0_7   = 0x00,
    NM_POOL32AXF_2_8_15  = 0x01,
    NM_POOL32AXF_2_16_23 = 0x02,
    NM_POOL32AXF_2_24_31 = 0x03,

/* POOL32Axf_7 instruction pool */
    NM_SHRA_R_QB       = 0x0,

/* POOL32Axf_1_0 instruction pool */

/* POOL32Axf_1_1 instruction pool */

/* POOL32Axf_1_3 instruction pool */

/* POOL32Axf_1_4 instruction pool */

/* POOL32Axf_1_5 instruction pool */
    NM_MAQ_S_W_PHR     = 0x0,
    NM_MAQ_S_W_PHL     = 0x1,
    NM_MAQ_SA_W_PHR    = 0x2,
    NM_MAQ_SA_W_PHL    = 0x3,

/* POOL32Axf_1_7 instruction pool */
    NM_EXTR_RS_W       = 0x2,

/* POOL32Axf_2_0_7 instruction pool */
    NM_DPAQ_S_W_PH     = 0x1,
    NM_DPSQ_S_W_PH     = 0x3,

/* POOL32Axf_2_8_15 instruction pool */
    NM_DPAX_W_PH       = 0x0,
    NM_DPAQ_SA_L_W     = 0x1,
    NM_DPSX_W_PH       = 0x2,
    NM_DPSQ_SA_L_W     = 0x3,
    NM_EXTRV_R_W       = 0x7,

/* POOL32Axf_2_16_23 instruction pool */
    NM_DPAU_H_QBL      = 0x0,
    NM_DPAQX_S_W_PH    = 0x1,
    NM_DPSU_H_QBL      = 0x2,
    NM_DPSQX_S_W_PH    = 0x3,
    NM_MULSA_W_PH      = 0x6,
    NM_EXTRV_RS_W      = 0x7,

/* POOL32Axf_2_24_31 instruction pool */
    NM_DPAU_H_QBR      = 0x0,
    NM_DPAQX_SA_W_PH   = 0x1,
    NM_DPSU_H_QBR      = 0x2,
    NM_DPSQX_SA_W_PH   = 0x3,
    NM_MULSAQ_S_W_PH   = 0x6,
    NM_EXTRV_S_H       = 0x7,

/* POOL32Axf_{4, 5} instruction pool */

/* nanoMIPS DSP instructions */
    NM_ABSQ_S_QB       = 0x00,
    NM_ABSQ_S_PH       = 0x08,
    NM_ABSQ_S_W        = 0x10,
    NM_PRECEQ_W_PHL    = 0x28,
    NM_PRECEQ_W_PHR    = 0x30,
    NM_PRECEQU_PH_QBL  = 0x38,
    NM_PRECEQU_PH_QBR  = 0x48,
    NM_PRECEU_PH_QBL   = 0x58,
    NM_PRECEU_PH_QBR   = 0x68,
    NM_PRECEQU_PH_QBLA = 0x39,
    NM_PRECEQU_PH_QBRA = 0x49,
    NM_PRECEU_PH_QBLA  = 0x59,
    NM_PRECEU_PH_QBRA  = 0x69,
    NM_REPLV_PH        = 0x01,
    NM_REPLV_QB        = 0x09,
    NM_RADDU_W_QB      = 0x78,

/* PP.SR instruction pool */
    NM_RESTORE_JRC     = 0x03,

/* P.SR.F instruction pool */
    NM_RESTOREF        = 0x01,

/* P16.SYSCALL instruction pool */
    NM_SYSCALL16       = 0x00,
    NM_HYPCALL16       = 0x01,

/* POOL16C_00 instruction pool */

/* PP.LSX and PP.LSXS instruction pool */

/* ERETx instruction pool */
/* POOL32FxF_{0, 1} instruction pool */
    NM_CVT_S_PL        = 0x84,
    NM_CVT_S_PU        = 0xa4,

    NM_CVT_L_S         = 0x004,
    NM_CVT_L_D         = 0x104,
    NM_CVT_W_S         = 0x024,
    NM_CVT_W_D         = 0x124,

    NM_RSQRT_S         = 0x008,
    NM_RSQRT_D         = 0x108,

    NM_RECIP_S         = 0x048,
    NM_RECIP_D         = 0x148,

    NM_FLOOR_L_S       = 0x00c,
    NM_FLOOR_L_D       = 0x10c,

    NM_FLOOR_W_S       = 0x02c,
    NM_FLOOR_W_D       = 0x12c,

    NM_CEIL_L_S        = 0x04c,
    NM_CEIL_L_D        = 0x14c,
    NM_CEIL_W_S        = 0x06c,
    NM_CEIL_W_D        = 0x16c,
    NM_TRUNC_L_S       = 0x08c,
    NM_TRUNC_L_D       = 0x18c,
    NM_TRUNC_W_S       = 0x0ac,
    NM_TRUNC_W_D       = 0x1ac,
    NM_ROUND_L_S       = 0x0cc,
    NM_ROUND_L_D       = 0x1cc,
    NM_ROUND_W_S       = 0x0ec,
    NM_ROUND_W_D       = 0x1ec,

    NM_CVT_D_S         = 0x04d,
    NM_CVT_D_W         = 0x0cd,
    NM_CVT_D_L         = 0x14d,
    NM_CVT_S_D         = 0x06d,
    NM_CVT_S_W         = 0x0ed,
    NM_CVT_S_L         = 0x16d,

/* P.LL instruction pool */

/* P.SC instruction pool */

/* P.DVP instruction pool */
/* nanoMIPS decoding engine */
/* extraction utilities */

#define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    static const int map[] = { 16, 17, 18, 19,  4,  5,  6,  7 };

    return map[r & 0x7];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    static const int map[] = {  0, 17, 18, 19,  4,  5,  6,  7 };

    return map[r & 0x7];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    static const int map[] = {  8,  9, 10, 11,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    static const int map[] = {  8,  9, 10,  0,  4,  5,  6,  7,
                               16, 17, 18, 19, 20, 21, 22, 23 };

    return map[r & 0xf];
}
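/*
 * Worked example (illustrative only, not part of the decoder proper):
 * for a 16-bit instruction word whose bits [9:7] are 0b001 (e.g. 0x0080),
 * NANOMIPS_EXTRACT_RT3() yields the encoded value 1 and decode_gpr_gpr3()
 * widens it via the gpr3 map to architectural register 17 ($s1).  The
 * hypothetical helper below simply chains the two steps for one field.
 */
static inline int example_decode_rt3(uint32_t insn)
{
    /* extract the 3-bit rt field, then map it to a full GPR number */
    return decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(insn));
}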
static void gen_adjust_sp(DisasContext *ctx, int u)
{
    gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
}
static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
                     uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = -((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        gen_load_gpr(t0, this_rt);
        tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
                           (MO_TEUL | ctx->default_tcg_memop_mask));
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, -u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
                        uint8_t gp, uint16_t u)
{
    int counter = 0;
    TCGv va = tcg_temp_new();
    TCGv t0 = tcg_temp_new();

    while (counter != count) {
        bool use_gp = gp && (counter == count - 1);
        int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
        int this_offset = u - ((counter + 1) << 2);
        gen_base_offset_addr(ctx, va, 29, this_offset);
        tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
                           ctx->default_tcg_memop_mask);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, this_rt);
        counter++;
    }

    /* adjust stack pointer */
    gen_adjust_sp(ctx, u);

    tcg_temp_free(t0);
    tcg_temp_free(va);
}
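/*
 * Reading aid, derived from the offsets computed in the two loops above:
 * with an adjustment of 'u' bytes and 'count' saved words, gen_save()
 * stores slot i (i = 0 .. count - 1) at old_sp - 4 * (i + 1) and then
 * subtracts u from $sp, so after SAVE the words sit at new_sp + u - 4,
 * new_sp + u - 8, and so on.  Slot i holds GPR (rt & 0x10) | ((rt + i)
 * & 0x1f), or $28 (gp) for the last slot when the gp flag is set.
 * gen_restore() reads the same slots back relative to the unadjusted
 * $sp and finally adds u.  For example, u = 16 and count = 2 saves rt
 * at old_sp - 4 and rt + 1 at old_sp - 8 before dropping $sp by 16.
 */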
static void gen_pool16c_nanomips_insn(DisasContext *ctx)
{
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx->opcode));

    switch (extract32(ctx->opcode, 2, 2)) {
    case NM_NOT16:
        gen_logic(ctx, OPC_NOR, rt, rs, 0);
        break;
    case NM_AND16:
        gen_logic(ctx, OPC_AND, rt, rt, rs);
        break;
    case NM_XOR16:
        gen_logic(ctx, OPC_XOR, rt, rt, rs);
        break;
    case NM_OR16:
        gen_logic(ctx, OPC_OR, rt, rt, rs);
        break;
    }
}
18543 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18545 int rt
= extract32(ctx
->opcode
, 21, 5);
18546 int rs
= extract32(ctx
->opcode
, 16, 5);
18547 int rd
= extract32(ctx
->opcode
, 11, 5);
18549 switch (extract32(ctx
->opcode
, 3, 7)) {
18551 switch (extract32(ctx
->opcode
, 10, 1)) {
18554 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18558 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18564 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18568 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18571 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18574 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18577 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18580 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18583 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18586 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18589 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18593 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18596 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18599 switch (extract32(ctx
->opcode
, 10, 1)) {
18601 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18604 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18609 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18612 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18615 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18618 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18621 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18626 #ifndef CONFIG_USER_ONLY
18627 TCGv t0
= tcg_temp_new();
18628 switch (extract32(ctx
->opcode
, 10, 1)) {
18631 check_cp0_enabled(ctx
);
18632 gen_helper_dvp(t0
, cpu_env
);
18633 gen_store_gpr(t0
, rt
);
18638 check_cp0_enabled(ctx
);
18639 gen_helper_evp(t0
, cpu_env
);
18640 gen_store_gpr(t0
, rt
);
18647 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18652 TCGv t0
= tcg_temp_new();
18653 TCGv t1
= tcg_temp_new();
18654 TCGv t2
= tcg_temp_new();
18656 gen_load_gpr(t1
, rs
);
18657 gen_load_gpr(t2
, rt
);
18658 tcg_gen_add_tl(t0
, t1
, t2
);
18659 tcg_gen_ext32s_tl(t0
, t0
);
18660 tcg_gen_xor_tl(t1
, t1
, t2
);
18661 tcg_gen_xor_tl(t2
, t0
, t2
);
18662 tcg_gen_andc_tl(t1
, t2
, t1
);
18664 /* operands of same sign, result different sign */
18665 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18666 gen_store_gpr(t0
, rd
);
18674 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18677 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18680 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18683 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18686 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18689 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18692 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18695 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18697 #ifndef CONFIG_USER_ONLY
18699 check_cp0_enabled(ctx
);
18701 /* Treat as NOP. */
18704 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18707 check_cp0_enabled(ctx
);
18709 TCGv t0
= tcg_temp_new();
18711 gen_load_gpr(t0
, rt
);
18712 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18716 case NM_D_E_MT_VPE
:
18718 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18719 TCGv t0
= tcg_temp_new();
18726 gen_helper_dmt(t0
);
18727 gen_store_gpr(t0
, rt
);
18728 } else if (rs
== 0) {
18731 gen_helper_dvpe(t0
, cpu_env
);
18732 gen_store_gpr(t0
, rt
);
18734 generate_exception_end(ctx
, EXCP_RI
);
18741 gen_helper_emt(t0
);
18742 gen_store_gpr(t0
, rt
);
18743 } else if (rs
== 0) {
18746 gen_helper_evpe(t0
, cpu_env
);
18747 gen_store_gpr(t0
, rt
);
18749 generate_exception_end(ctx
, EXCP_RI
);
18760 TCGv t0
= tcg_temp_new();
18761 TCGv t1
= tcg_temp_new();
18763 gen_load_gpr(t0
, rt
);
18764 gen_load_gpr(t1
, rs
);
18765 gen_helper_fork(t0
, t1
);
18772 check_cp0_enabled(ctx
);
18774 /* Treat as NOP. */
18777 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18778 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18782 check_cp0_enabled(ctx
);
18783 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18784 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18789 TCGv t0
= tcg_temp_new();
18791 gen_load_gpr(t0
, rs
);
18792 gen_helper_yield(t0
, cpu_env
, t0
);
18793 gen_store_gpr(t0
, rt
);
18799 generate_exception_end(ctx
, EXCP_RI
);
static void gen_pool32axf_1_5_nanomips_insn(DisasContext *ctx, uint32_t opc,
                                            int ret, int v1, int v2)
{
    TCGv_i32 t0;
    TCGv v0_t;
    TCGv v1_t;

    t0 = tcg_temp_new_i32();

    v0_t = tcg_temp_new();
    v1_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, v2 >> 3);

    gen_load_gpr(v0_t, ret);
    gen_load_gpr(v1_t, v1);

    switch (opc) {
    case NM_MAQ_S_W_PHR:
        check_dsp(ctx);
        gen_helper_maq_s_w_phr(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_S_W_PHL:
        check_dsp(ctx);
        gen_helper_maq_s_w_phl(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_SA_W_PHR:
        check_dsp(ctx);
        gen_helper_maq_sa_w_phr(t0, v1_t, v0_t, cpu_env);
        break;
    case NM_MAQ_SA_W_PHL:
        check_dsp(ctx);
        gen_helper_maq_sa_w_phl(t0, v1_t, v0_t, cpu_env);
        break;
    default:
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(t0);

    tcg_temp_free(v0_t);
    tcg_temp_free(v1_t);
}
18851 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18852 int ret
, int v1
, int v2
)
18855 TCGv t0
= tcg_temp_new();
18856 TCGv t1
= tcg_temp_new();
18857 TCGv v0_t
= tcg_temp_new();
18859 gen_load_gpr(v0_t
, v1
);
18862 case NM_POOL32AXF_1_0
:
18864 switch (extract32(ctx
->opcode
, 12, 2)) {
18866 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18869 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18872 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18875 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18879 case NM_POOL32AXF_1_1
:
18881 switch (extract32(ctx
->opcode
, 12, 2)) {
18883 tcg_gen_movi_tl(t0
, v2
);
18884 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18887 tcg_gen_movi_tl(t0
, v2
>> 3);
18888 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18891 generate_exception_end(ctx
, EXCP_RI
);
18895 case NM_POOL32AXF_1_3
:
18897 imm
= extract32(ctx
->opcode
, 14, 7);
18898 switch (extract32(ctx
->opcode
, 12, 2)) {
18900 tcg_gen_movi_tl(t0
, imm
);
18901 gen_helper_rddsp(t0
, t0
, cpu_env
);
18902 gen_store_gpr(t0
, ret
);
18905 gen_load_gpr(t0
, ret
);
18906 tcg_gen_movi_tl(t1
, imm
);
18907 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18910 tcg_gen_movi_tl(t0
, v2
>> 3);
18911 tcg_gen_movi_tl(t1
, v1
);
18912 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18913 gen_store_gpr(t0
, ret
);
18916 tcg_gen_movi_tl(t0
, v2
>> 3);
18917 tcg_gen_movi_tl(t1
, v1
);
18918 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18919 gen_store_gpr(t0
, ret
);
18923 case NM_POOL32AXF_1_4
:
18925 tcg_gen_movi_tl(t0
, v2
>> 2);
18926 switch (extract32(ctx
->opcode
, 12, 1)) {
18928 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18929 gen_store_gpr(t0
, ret
);
18932 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18933 gen_store_gpr(t0
, ret
);
18937 case NM_POOL32AXF_1_5
:
18938 opc
= extract32(ctx
->opcode
, 12, 2);
18939 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18941 case NM_POOL32AXF_1_7
:
18943 tcg_gen_movi_tl(t0
, v2
>> 3);
18944 tcg_gen_movi_tl(t1
, v1
);
18945 switch (extract32(ctx
->opcode
, 12, 2)) {
18947 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18948 gen_store_gpr(t0
, ret
);
18951 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18952 gen_store_gpr(t0
, ret
);
18955 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18956 gen_store_gpr(t0
, ret
);
18959 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18960 gen_store_gpr(t0
, ret
);
18965 generate_exception_end(ctx
, EXCP_RI
);
18971 tcg_temp_free(v0_t
);
18974 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18975 TCGv v0
, TCGv v1
, int rd
)
18979 t0
= tcg_temp_new_i32();
18981 tcg_gen_movi_i32(t0
, rd
>> 3);
18984 case NM_POOL32AXF_2_0_7
:
18985 switch (extract32(ctx
->opcode
, 9, 3)) {
18988 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18990 case NM_DPAQ_S_W_PH
:
18992 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18996 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18998 case NM_DPSQ_S_W_PH
:
19000 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19003 generate_exception_end(ctx
, EXCP_RI
);
19007 case NM_POOL32AXF_2_8_15
:
19008 switch (extract32(ctx
->opcode
, 9, 3)) {
19011 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19013 case NM_DPAQ_SA_L_W
:
19015 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19019 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19021 case NM_DPSQ_SA_L_W
:
19023 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19026 generate_exception_end(ctx
, EXCP_RI
);
19030 case NM_POOL32AXF_2_16_23
:
19031 switch (extract32(ctx
->opcode
, 9, 3)) {
19032 case NM_DPAU_H_QBL
:
19034 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19036 case NM_DPAQX_S_W_PH
:
19038 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19040 case NM_DPSU_H_QBL
:
19042 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19044 case NM_DPSQX_S_W_PH
:
19046 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19048 case NM_MULSA_W_PH
:
19050 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19053 generate_exception_end(ctx
, EXCP_RI
);
19057 case NM_POOL32AXF_2_24_31
:
19058 switch (extract32(ctx
->opcode
, 9, 3)) {
19059 case NM_DPAU_H_QBR
:
19061 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19063 case NM_DPAQX_SA_W_PH
:
19065 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19067 case NM_DPSU_H_QBR
:
19069 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19071 case NM_DPSQX_SA_W_PH
:
19073 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19075 case NM_MULSAQ_S_W_PH
:
19077 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19080 generate_exception_end(ctx
, EXCP_RI
);
19085 generate_exception_end(ctx
, EXCP_RI
);
19089 tcg_temp_free_i32(t0
);
19092 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19093 int rt
, int rs
, int rd
)
19096 TCGv t0
= tcg_temp_new();
19097 TCGv t1
= tcg_temp_new();
19098 TCGv v0_t
= tcg_temp_new();
19099 TCGv v1_t
= tcg_temp_new();
19101 gen_load_gpr(v0_t
, rt
);
19102 gen_load_gpr(v1_t
, rs
);
19105 case NM_POOL32AXF_2_0_7
:
19106 switch (extract32(ctx
->opcode
, 9, 3)) {
19108 case NM_DPAQ_S_W_PH
:
19110 case NM_DPSQ_S_W_PH
:
19111 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19116 gen_load_gpr(t0
, rs
);
19118 if (rd
!= 0 && rd
!= 2) {
19119 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19120 tcg_gen_ext32u_tl(t0
, t0
);
19121 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19122 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19124 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19130 int acc
= extract32(ctx
->opcode
, 14, 2);
19131 TCGv_i64 t2
= tcg_temp_new_i64();
19132 TCGv_i64 t3
= tcg_temp_new_i64();
19134 gen_load_gpr(t0
, rt
);
19135 gen_load_gpr(t1
, rs
);
19136 tcg_gen_ext_tl_i64(t2
, t0
);
19137 tcg_gen_ext_tl_i64(t3
, t1
);
19138 tcg_gen_mul_i64(t2
, t2
, t3
);
19139 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19140 tcg_gen_add_i64(t2
, t2
, t3
);
19141 tcg_temp_free_i64(t3
);
19142 gen_move_low32(cpu_LO
[acc
], t2
);
19143 gen_move_high32(cpu_HI
[acc
], t2
);
19144 tcg_temp_free_i64(t2
);
19150 int acc
= extract32(ctx
->opcode
, 14, 2);
19151 TCGv_i32 t2
= tcg_temp_new_i32();
19152 TCGv_i32 t3
= tcg_temp_new_i32();
19154 gen_load_gpr(t0
, rs
);
19155 gen_load_gpr(t1
, rt
);
19156 tcg_gen_trunc_tl_i32(t2
, t0
);
19157 tcg_gen_trunc_tl_i32(t3
, t1
);
19158 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19159 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19160 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19161 tcg_temp_free_i32(t2
);
19162 tcg_temp_free_i32(t3
);
19167 gen_load_gpr(v1_t
, rs
);
19168 tcg_gen_movi_tl(t0
, rd
>> 3);
19169 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19170 gen_store_gpr(t0
, ret
);
19174 case NM_POOL32AXF_2_8_15
:
19175 switch (extract32(ctx
->opcode
, 9, 3)) {
19177 case NM_DPAQ_SA_L_W
:
19179 case NM_DPSQ_SA_L_W
:
19180 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19185 int acc
= extract32(ctx
->opcode
, 14, 2);
19186 TCGv_i64 t2
= tcg_temp_new_i64();
19187 TCGv_i64 t3
= tcg_temp_new_i64();
19189 gen_load_gpr(t0
, rs
);
19190 gen_load_gpr(t1
, rt
);
19191 tcg_gen_ext32u_tl(t0
, t0
);
19192 tcg_gen_ext32u_tl(t1
, t1
);
19193 tcg_gen_extu_tl_i64(t2
, t0
);
19194 tcg_gen_extu_tl_i64(t3
, t1
);
19195 tcg_gen_mul_i64(t2
, t2
, t3
);
19196 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19197 tcg_gen_add_i64(t2
, t2
, t3
);
19198 tcg_temp_free_i64(t3
);
19199 gen_move_low32(cpu_LO
[acc
], t2
);
19200 gen_move_high32(cpu_HI
[acc
], t2
);
19201 tcg_temp_free_i64(t2
);
19207 int acc
= extract32(ctx
->opcode
, 14, 2);
19208 TCGv_i32 t2
= tcg_temp_new_i32();
19209 TCGv_i32 t3
= tcg_temp_new_i32();
19211 gen_load_gpr(t0
, rs
);
19212 gen_load_gpr(t1
, rt
);
19213 tcg_gen_trunc_tl_i32(t2
, t0
);
19214 tcg_gen_trunc_tl_i32(t3
, t1
);
19215 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19216 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19217 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19218 tcg_temp_free_i32(t2
);
19219 tcg_temp_free_i32(t3
);
19224 tcg_gen_movi_tl(t0
, rd
>> 3);
19225 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19226 gen_store_gpr(t0
, ret
);
19229 generate_exception_end(ctx
, EXCP_RI
);
19233 case NM_POOL32AXF_2_16_23
:
19234 switch (extract32(ctx
->opcode
, 9, 3)) {
19235 case NM_DPAU_H_QBL
:
19236 case NM_DPAQX_S_W_PH
:
19237 case NM_DPSU_H_QBL
:
19238 case NM_DPSQX_S_W_PH
:
19239 case NM_MULSA_W_PH
:
19240 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19244 tcg_gen_movi_tl(t0
, rd
>> 3);
19245 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19246 gen_store_gpr(t0
, ret
);
19251 int acc
= extract32(ctx
->opcode
, 14, 2);
19252 TCGv_i64 t2
= tcg_temp_new_i64();
19253 TCGv_i64 t3
= tcg_temp_new_i64();
19255 gen_load_gpr(t0
, rs
);
19256 gen_load_gpr(t1
, rt
);
19257 tcg_gen_ext_tl_i64(t2
, t0
);
19258 tcg_gen_ext_tl_i64(t3
, t1
);
19259 tcg_gen_mul_i64(t2
, t2
, t3
);
19260 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19261 tcg_gen_sub_i64(t2
, t3
, t2
);
19262 tcg_temp_free_i64(t3
);
19263 gen_move_low32(cpu_LO
[acc
], t2
);
19264 gen_move_high32(cpu_HI
[acc
], t2
);
19265 tcg_temp_free_i64(t2
);
19268 case NM_EXTRV_RS_W
:
19270 tcg_gen_movi_tl(t0
, rd
>> 3);
19271 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19272 gen_store_gpr(t0
, ret
);
19276 case NM_POOL32AXF_2_24_31
:
19277 switch (extract32(ctx
->opcode
, 9, 3)) {
19278 case NM_DPAU_H_QBR
:
19279 case NM_DPAQX_SA_W_PH
:
19280 case NM_DPSU_H_QBR
:
19281 case NM_DPSQX_SA_W_PH
:
19282 case NM_MULSAQ_S_W_PH
:
19283 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19287 tcg_gen_movi_tl(t0
, rd
>> 3);
19288 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19289 gen_store_gpr(t0
, ret
);
19294 int acc
= extract32(ctx
->opcode
, 14, 2);
19295 TCGv_i64 t2
= tcg_temp_new_i64();
19296 TCGv_i64 t3
= tcg_temp_new_i64();
19298 gen_load_gpr(t0
, rs
);
19299 gen_load_gpr(t1
, rt
);
19300 tcg_gen_ext32u_tl(t0
, t0
);
19301 tcg_gen_ext32u_tl(t1
, t1
);
19302 tcg_gen_extu_tl_i64(t2
, t0
);
19303 tcg_gen_extu_tl_i64(t3
, t1
);
19304 tcg_gen_mul_i64(t2
, t2
, t3
);
19305 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19306 tcg_gen_sub_i64(t2
, t3
, t2
);
19307 tcg_temp_free_i64(t3
);
19308 gen_move_low32(cpu_LO
[acc
], t2
);
19309 gen_move_high32(cpu_HI
[acc
], t2
);
19310 tcg_temp_free_i64(t2
);
19315 tcg_gen_movi_tl(t0
, rd
>> 3);
19316 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19317 gen_store_gpr(t0
, ret
);
19322 generate_exception_end(ctx
, EXCP_RI
);
19329 tcg_temp_free(v0_t
);
19330 tcg_temp_free(v1_t
);
19333 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19337 TCGv t0
= tcg_temp_new();
19338 TCGv v0_t
= tcg_temp_new();
19340 gen_load_gpr(v0_t
, rs
);
19345 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19346 gen_store_gpr(v0_t
, ret
);
19350 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19351 gen_store_gpr(v0_t
, ret
);
19355 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19356 gen_store_gpr(v0_t
, ret
);
19358 case NM_PRECEQ_W_PHL
:
19360 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19361 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19362 gen_store_gpr(v0_t
, ret
);
19364 case NM_PRECEQ_W_PHR
:
19366 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19367 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19368 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19369 gen_store_gpr(v0_t
, ret
);
19371 case NM_PRECEQU_PH_QBL
:
19373 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19374 gen_store_gpr(v0_t
, ret
);
19376 case NM_PRECEQU_PH_QBR
:
19378 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19379 gen_store_gpr(v0_t
, ret
);
19381 case NM_PRECEQU_PH_QBLA
:
19383 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19384 gen_store_gpr(v0_t
, ret
);
19386 case NM_PRECEQU_PH_QBRA
:
19388 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19389 gen_store_gpr(v0_t
, ret
);
19391 case NM_PRECEU_PH_QBL
:
19393 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19394 gen_store_gpr(v0_t
, ret
);
19396 case NM_PRECEU_PH_QBR
:
19398 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19399 gen_store_gpr(v0_t
, ret
);
19401 case NM_PRECEU_PH_QBLA
:
19403 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19404 gen_store_gpr(v0_t
, ret
);
19406 case NM_PRECEU_PH_QBRA
:
19408 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19409 gen_store_gpr(v0_t
, ret
);
19413 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19414 tcg_gen_shli_tl(t0
, v0_t
, 16);
19415 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19416 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19417 gen_store_gpr(v0_t
, ret
);
19421 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19422 tcg_gen_shli_tl(t0
, v0_t
, 8);
19423 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19424 tcg_gen_shli_tl(t0
, v0_t
, 16);
19425 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19426 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19427 gen_store_gpr(v0_t
, ret
);
19431 gen_helper_bitrev(v0_t
, v0_t
);
19432 gen_store_gpr(v0_t
, ret
);
19437 TCGv tv0
= tcg_temp_new();
19439 gen_load_gpr(tv0
, rt
);
19440 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19441 gen_store_gpr(v0_t
, ret
);
19442 tcg_temp_free(tv0
);
19445 case NM_RADDU_W_QB
:
19447 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19448 gen_store_gpr(v0_t
, ret
);
19451 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19455 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19459 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19462 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19465 generate_exception_end(ctx
, EXCP_RI
);
19469 tcg_temp_free(v0_t
);
19473 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19474 int rt
, int rs
, int rd
)
19476 TCGv t0
= tcg_temp_new();
19477 TCGv rs_t
= tcg_temp_new();
19479 gen_load_gpr(rs_t
, rs
);
19484 tcg_gen_movi_tl(t0
, rd
>> 2);
19485 switch (extract32(ctx
->opcode
, 12, 1)) {
19488 gen_helper_shra_qb(t0
, t0
, rs_t
);
19489 gen_store_gpr(t0
, rt
);
19493 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19494 gen_store_gpr(t0
, rt
);
19500 tcg_gen_movi_tl(t0
, rd
>> 1);
19501 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19502 gen_store_gpr(t0
, rt
);
19508 target_long result
;
19509 imm
= extract32(ctx
->opcode
, 13, 8);
19510 result
= (uint32_t)imm
<< 24 |
19511 (uint32_t)imm
<< 16 |
19512 (uint32_t)imm
<< 8 |
19514 result
= (int32_t)result
;
19515 tcg_gen_movi_tl(t0
, result
);
19516 gen_store_gpr(t0
, rt
);
19520 generate_exception_end(ctx
, EXCP_RI
);
19524 tcg_temp_free(rs_t
);
19528 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19530 int rt
= extract32(ctx
->opcode
, 21, 5);
19531 int rs
= extract32(ctx
->opcode
, 16, 5);
19532 int rd
= extract32(ctx
->opcode
, 11, 5);
19534 switch (extract32(ctx
->opcode
, 6, 3)) {
19535 case NM_POOL32AXF_1
:
19537 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19538 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19541 case NM_POOL32AXF_2
:
19543 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19544 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19547 case NM_POOL32AXF_4
:
19549 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19550 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19553 case NM_POOL32AXF_5
:
19554 switch (extract32(ctx
->opcode
, 9, 7)) {
19555 #ifndef CONFIG_USER_ONLY
19557 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19560 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19563 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19566 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19569 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19572 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19575 check_cp0_enabled(ctx
);
19577 TCGv t0
= tcg_temp_new();
19579 save_cpu_state(ctx
, 1);
19580 gen_helper_di(t0
, cpu_env
);
19581 gen_store_gpr(t0
, rt
);
19582 /* Stop translation as we may have switched the execution mode */
19583 ctx
->base
.is_jmp
= DISAS_STOP
;
19588 check_cp0_enabled(ctx
);
19590 TCGv t0
= tcg_temp_new();
19592 save_cpu_state(ctx
, 1);
19593 gen_helper_ei(t0
, cpu_env
);
19594 gen_store_gpr(t0
, rt
);
19595 /* Stop translation as we may have switched the execution mode */
19596 ctx
->base
.is_jmp
= DISAS_STOP
;
19601 gen_load_srsgpr(rs
, rt
);
19604 gen_store_srsgpr(rs
, rt
);
19607 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19610 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19613 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19617 generate_exception_end(ctx
, EXCP_RI
);
19621 case NM_POOL32AXF_7
:
19623 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19624 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19628 generate_exception_end(ctx
, EXCP_RI
);
19633 /* Immediate Value Compact Branches */
19634 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19635 int rt
, int32_t imm
, int32_t offset
)
19638 int bcond_compute
= 0;
19639 TCGv t0
= tcg_temp_new();
19640 TCGv t1
= tcg_temp_new();
19642 gen_load_gpr(t0
, rt
);
19643 tcg_gen_movi_tl(t1
, imm
);
19644 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19646 /* Load needed operands and calculate btarget */
19649 if (rt
== 0 && imm
== 0) {
19650 /* Unconditional branch */
19651 } else if (rt
== 0 && imm
!= 0) {
19656 cond
= TCG_COND_EQ
;
19662 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19663 generate_exception_end(ctx
, EXCP_RI
);
19665 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19666 /* Unconditional branch */
19667 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19671 tcg_gen_shri_tl(t0
, t0
, imm
);
19672 tcg_gen_andi_tl(t0
, t0
, 1);
19673 tcg_gen_movi_tl(t1
, 0);
19675 if (opc
== NM_BBEQZC
) {
19676 cond
= TCG_COND_EQ
;
19678 cond
= TCG_COND_NE
;
19683 if (rt
== 0 && imm
== 0) {
19686 } else if (rt
== 0 && imm
!= 0) {
19687 /* Unconditional branch */
19690 cond
= TCG_COND_NE
;
19694 if (rt
== 0 && imm
== 0) {
19695 /* Unconditional branch */
19698 cond
= TCG_COND_GE
;
19703 cond
= TCG_COND_LT
;
19706 if (rt
== 0 && imm
== 0) {
19707 /* Unconditional branch */
19710 cond
= TCG_COND_GEU
;
19715 cond
= TCG_COND_LTU
;
19718 MIPS_INVAL("Immediate Value Compact branch");
19719 generate_exception_end(ctx
, EXCP_RI
);
19723 /* branch completion */
19724 clear_branch_hflags(ctx
);
19725 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19727 if (bcond_compute
== 0) {
19728 /* Uncoditional compact branch */
19729 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19731 /* Conditional compact branch */
19732 TCGLabel
*fs
= gen_new_label();
19734 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19736 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19739 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19747 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19748 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19751 TCGv t0
= tcg_temp_new();
19752 TCGv t1
= tcg_temp_new();
19755 gen_load_gpr(t0
, rs
);
19759 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19762 /* calculate btarget */
19763 tcg_gen_shli_tl(t0
, t0
, 1);
19764 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19765 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19767 /* branch completion */
19768 clear_branch_hflags(ctx
);
19769 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19771 /* unconditional branch to register */
19772 tcg_gen_mov_tl(cpu_PC
, btarget
);
19773 tcg_gen_lookup_and_goto_ptr();
19779 /* nanoMIPS Branches */
19780 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19781 int rs
, int rt
, int32_t offset
)
19783 int bcond_compute
= 0;
19784 TCGv t0
= tcg_temp_new();
19785 TCGv t1
= tcg_temp_new();
19787 /* Load needed operands and calculate btarget */
19789 /* compact branch */
19792 gen_load_gpr(t0
, rs
);
19793 gen_load_gpr(t1
, rt
);
19795 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19799 if (rs
== 0 || rs
== rt
) {
19800 /* OPC_BLEZALC, OPC_BGEZALC */
19801 /* OPC_BGTZALC, OPC_BLTZALC */
19802 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19804 gen_load_gpr(t0
, rs
);
19805 gen_load_gpr(t1
, rt
);
19807 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19810 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19814 /* OPC_BEQZC, OPC_BNEZC */
19815 gen_load_gpr(t0
, rs
);
19817 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19819 /* OPC_JIC, OPC_JIALC */
19820 TCGv tbase
= tcg_temp_new();
19821 TCGv toffset
= tcg_temp_new();
19823 gen_load_gpr(tbase
, rt
);
19824 tcg_gen_movi_tl(toffset
, offset
);
19825 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19826 tcg_temp_free(tbase
);
19827 tcg_temp_free(toffset
);
19831 MIPS_INVAL("Compact branch/jump");
19832 generate_exception_end(ctx
, EXCP_RI
);
19836 if (bcond_compute
== 0) {
19837 /* Uncoditional compact branch */
19840 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19843 MIPS_INVAL("Compact branch/jump");
19844 generate_exception_end(ctx
, EXCP_RI
);
19848 /* Conditional compact branch */
19849 TCGLabel
*fs
= gen_new_label();
19853 if (rs
== 0 && rt
!= 0) {
19855 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19856 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19858 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19861 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19865 if (rs
== 0 && rt
!= 0) {
19867 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19868 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19870 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19873 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19877 if (rs
== 0 && rt
!= 0) {
19879 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19880 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19882 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19885 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19889 if (rs
== 0 && rt
!= 0) {
19891 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19892 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19894 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19897 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19901 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19904 MIPS_INVAL("Compact conditional branch/jump");
19905 generate_exception_end(ctx
, EXCP_RI
);
19909 /* branch completion */
19910 clear_branch_hflags(ctx
);
19911 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19913 /* Generating branch here as compact branches don't have delay slot */
19914 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19917 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19926 /* nanoMIPS CP1 Branches */
19927 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19928 int32_t ft
, int32_t offset
)
19930 target_ulong btarget
;
19931 TCGv_i64 t0
= tcg_temp_new_i64();
19933 gen_load_fpr64(ctx
, t0
, ft
);
19934 tcg_gen_andi_i64(t0
, t0
, 1);
19936 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19940 tcg_gen_xori_i64(t0
, t0
, 1);
19941 ctx
->hflags
|= MIPS_HFLAG_BC
;
19944 /* t0 already set */
19945 ctx
->hflags
|= MIPS_HFLAG_BC
;
19948 MIPS_INVAL("cp1 cond branch");
19949 generate_exception_end(ctx
, EXCP_RI
);
19953 tcg_gen_trunc_i64_tl(bcond
, t0
);
19955 ctx
->btarget
= btarget
;
19958 tcg_temp_free_i64(t0
);
19962 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19965 t0
= tcg_temp_new();
19966 t1
= tcg_temp_new();
19968 gen_load_gpr(t0
, rs
);
19969 gen_load_gpr(t1
, rt
);
19971 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19972 /* PP.LSXS instructions require shifting */
19973 switch (extract32(ctx
->opcode
, 7, 4)) {
19978 tcg_gen_shli_tl(t0
, t0
, 1);
19985 tcg_gen_shli_tl(t0
, t0
, 2);
19989 tcg_gen_shli_tl(t0
, t0
, 3);
19993 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19995 switch (extract32(ctx
->opcode
, 7, 4)) {
19997 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19999 gen_store_gpr(t0
, rd
);
20003 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20005 gen_store_gpr(t0
, rd
);
20009 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20011 gen_store_gpr(t0
, rd
);
20014 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20016 gen_store_gpr(t0
, rd
);
20020 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20022 gen_store_gpr(t0
, rd
);
20026 gen_load_gpr(t1
, rd
);
20027 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20033 gen_load_gpr(t1
, rd
);
20034 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20040 gen_load_gpr(t1
, rd
);
20041 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20045 /*case NM_LWC1XS:*/
20047 /*case NM_LDC1XS:*/
20049 /*case NM_SWC1XS:*/
20051 /*case NM_SDC1XS:*/
20052 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20053 check_cp1_enabled(ctx
);
20054 switch (extract32(ctx
->opcode
, 7, 4)) {
20056 /*case NM_LWC1XS:*/
20057 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20060 /*case NM_LDC1XS:*/
20061 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20064 /*case NM_SWC1XS:*/
20065 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20068 /*case NM_SDC1XS:*/
20069 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20073 generate_exception_err(ctx
, EXCP_CpU
, 1);
20077 generate_exception_end(ctx
, EXCP_RI
);
20085 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20089 rt
= extract32(ctx
->opcode
, 21, 5);
20090 rs
= extract32(ctx
->opcode
, 16, 5);
20091 rd
= extract32(ctx
->opcode
, 11, 5);
20093 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20094 generate_exception_end(ctx
, EXCP_RI
);
20097 check_cp1_enabled(ctx
);
20098 switch (extract32(ctx
->opcode
, 0, 3)) {
20100 switch (extract32(ctx
->opcode
, 3, 7)) {
20102 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20105 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20108 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20111 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20114 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20117 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20120 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20123 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20126 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20129 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20132 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20135 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20138 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20141 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20144 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20147 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20150 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20153 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20156 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20159 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20162 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20165 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20168 generate_exception_end(ctx
, EXCP_RI
);
20173 switch (extract32(ctx
->opcode
, 3, 3)) {
20175 switch (extract32(ctx
->opcode
, 9, 1)) {
20177 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20180 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20185 switch (extract32(ctx
->opcode
, 9, 1)) {
20187 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20190 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20195 switch (extract32(ctx
->opcode
, 9, 1)) {
20197 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20200 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20205 switch (extract32(ctx
->opcode
, 9, 1)) {
20207 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20210 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20215 switch (extract32(ctx
->opcode
, 6, 8)) {
20217 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20220 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20223 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20226 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20229 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20232 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20235 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20238 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20241 switch (extract32(ctx
->opcode
, 6, 9)) {
20243 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20246 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20249 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20252 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20255 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20258 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20261 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20264 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20267 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20270 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20273 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20276 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20279 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20282 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20285 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20288 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20291 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20294 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20297 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20300 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20303 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20306 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20309 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20312 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20315 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20318 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20321 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20324 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20327 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20330 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20333 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20336 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20339 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20342 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20345 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20348 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20351 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20354 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20357 generate_exception_end(ctx
, EXCP_RI
);
20366 switch (extract32(ctx
->opcode
, 3, 3)) {
20367 case NM_CMP_CONDN_S
:
20368 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20370 case NM_CMP_CONDN_D
:
20371 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20374 generate_exception_end(ctx
, EXCP_RI
);
20379 generate_exception_end(ctx
, EXCP_RI
);
20384 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20385 int rd
, int rs
, int rt
)
20388 TCGv t0
= tcg_temp_new();
20389 TCGv v1_t
= tcg_temp_new();
20390 TCGv v2_t
= tcg_temp_new();
20392 gen_load_gpr(v1_t
, rs
);
20393 gen_load_gpr(v2_t
, rt
);
20398 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20402 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20406 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20408 case NM_CMPU_EQ_QB
:
20410 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20412 case NM_CMPU_LT_QB
:
20414 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20416 case NM_CMPU_LE_QB
:
20418 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20420 case NM_CMPGU_EQ_QB
:
20422 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20423 gen_store_gpr(v1_t
, ret
);
20425 case NM_CMPGU_LT_QB
:
20427 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20428 gen_store_gpr(v1_t
, ret
);
20430 case NM_CMPGU_LE_QB
:
20432 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20433 gen_store_gpr(v1_t
, ret
);
20435 case NM_CMPGDU_EQ_QB
:
20437 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20438 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20439 gen_store_gpr(v1_t
, ret
);
20441 case NM_CMPGDU_LT_QB
:
20443 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20444 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20445 gen_store_gpr(v1_t
, ret
);
20447 case NM_CMPGDU_LE_QB
:
20449 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20450 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20451 gen_store_gpr(v1_t
, ret
);
20455 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20456 gen_store_gpr(v1_t
, ret
);
20460 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20461 gen_store_gpr(v1_t
, ret
);
20465 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20466 gen_store_gpr(v1_t
, ret
);
20470 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20471 gen_store_gpr(v1_t
, ret
);
20475 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20476 gen_store_gpr(v1_t
, ret
);
20480 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20481 gen_store_gpr(v1_t
, ret
);
20485 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20486 gen_store_gpr(v1_t
, ret
);
20490 switch (extract32(ctx
->opcode
, 10, 1)) {
20493 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20494 gen_store_gpr(v1_t
, ret
);
20498 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20499 gen_store_gpr(v1_t
, ret
);
20503 case NM_ADDQH_R_PH
:
20505 switch (extract32(ctx
->opcode
, 10, 1)) {
20508 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20509 gen_store_gpr(v1_t
, ret
);
20513 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20514 gen_store_gpr(v1_t
, ret
);
20520 switch (extract32(ctx
->opcode
, 10, 1)) {
20523 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20524 gen_store_gpr(v1_t
, ret
);
20528 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20529 gen_store_gpr(v1_t
, ret
);
20535 switch (extract32(ctx
->opcode
, 10, 1)) {
20538 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20539 gen_store_gpr(v1_t
, ret
);
20543 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20544 gen_store_gpr(v1_t
, ret
);
20550 switch (extract32(ctx
->opcode
, 10, 1)) {
20553 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20554 gen_store_gpr(v1_t
, ret
);
20558 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20559 gen_store_gpr(v1_t
, ret
);
20563 case NM_ADDUH_R_QB
:
20565 switch (extract32(ctx
->opcode
, 10, 1)) {
20568 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20569 gen_store_gpr(v1_t
, ret
);
20573 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20574 gen_store_gpr(v1_t
, ret
);
20578 case NM_SHRAV_R_PH
:
20580 switch (extract32(ctx
->opcode
, 10, 1)) {
20583 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20584 gen_store_gpr(v1_t
, ret
);
20588 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20589 gen_store_gpr(v1_t
, ret
);
20593 case NM_SHRAV_R_QB
:
20595 switch (extract32(ctx
->opcode
, 10, 1)) {
20598 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20599 gen_store_gpr(v1_t
, ret
);
20603 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20604 gen_store_gpr(v1_t
, ret
);
20610 switch (extract32(ctx
->opcode
, 10, 1)) {
20613 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20614 gen_store_gpr(v1_t
, ret
);
20618 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20619 gen_store_gpr(v1_t
, ret
);
20623 case NM_SUBQH_R_PH
:
20625 switch (extract32(ctx
->opcode
, 10, 1)) {
20628 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20629 gen_store_gpr(v1_t
, ret
);
20633 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20634 gen_store_gpr(v1_t
, ret
);
20640 switch (extract32(ctx
->opcode
, 10, 1)) {
20643 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20644 gen_store_gpr(v1_t
, ret
);
20648 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20649 gen_store_gpr(v1_t
, ret
);
20655 switch (extract32(ctx
->opcode
, 10, 1)) {
20658 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20659 gen_store_gpr(v1_t
, ret
);
20663 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20664 gen_store_gpr(v1_t
, ret
);
20670 switch (extract32(ctx
->opcode
, 10, 1)) {
20673 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20674 gen_store_gpr(v1_t
, ret
);
20678 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20679 gen_store_gpr(v1_t
, ret
);
20683 case NM_SUBUH_R_QB
:
20685 switch (extract32(ctx
->opcode
, 10, 1)) {
20688 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20689 gen_store_gpr(v1_t
, ret
);
20693 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20694 gen_store_gpr(v1_t
, ret
);
20698 case NM_SHLLV_S_PH
:
20700 switch (extract32(ctx
->opcode
, 10, 1)) {
20703 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20704 gen_store_gpr(v1_t
, ret
);
20708 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20709 gen_store_gpr(v1_t
, ret
);
20713 case NM_PRECR_SRA_R_PH_W
:
20715 switch (extract32(ctx
->opcode
, 10, 1)) {
20717 /* PRECR_SRA_PH_W */
20719 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20720 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20722 gen_store_gpr(v1_t
, rt
);
20723 tcg_temp_free_i32(sa_t
);
20727 /* PRECR_SRA_R_PH_W */
20729 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20730 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20732 gen_store_gpr(v1_t
, rt
);
20733 tcg_temp_free_i32(sa_t
);
20738 case NM_MULEU_S_PH_QBL
:
20740 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20741 gen_store_gpr(v1_t
, ret
);
20743 case NM_MULEU_S_PH_QBR
:
20745 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20746 gen_store_gpr(v1_t
, ret
);
20748 case NM_MULQ_RS_PH
:
20750 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20751 gen_store_gpr(v1_t
, ret
);
20755 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20756 gen_store_gpr(v1_t
, ret
);
20760 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20761 gen_store_gpr(v1_t
, ret
);
20765 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20766 gen_store_gpr(v1_t
, ret
);
20770 gen_load_gpr(t0
, rs
);
20772 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20774 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20778 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20779 gen_store_gpr(v1_t
, ret
);
20783 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20784 gen_store_gpr(v1_t
, ret
);
20788 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20789 gen_store_gpr(v1_t
, ret
);
20793 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20794 gen_store_gpr(v1_t
, ret
);
20798 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20799 gen_store_gpr(v1_t
, ret
);
20803 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20804 gen_store_gpr(v1_t
, ret
);
20809 TCGv tv0
= tcg_temp_new();
20810 TCGv tv1
= tcg_temp_new();
20811 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20813 tcg_gen_movi_tl(tv0
, rd
>> 3);
20814 tcg_gen_movi_tl(tv1
, imm
);
20815 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20818 case NM_MULEQ_S_W_PHL
:
20820 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20821 gen_store_gpr(v1_t
, ret
);
20823 case NM_MULEQ_S_W_PHR
:
20825 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20826 gen_store_gpr(v1_t
, ret
);
20830 switch (extract32(ctx
->opcode
, 10, 1)) {
20833 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20834 gen_store_gpr(v1_t
, ret
);
20838 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20839 gen_store_gpr(v1_t
, ret
);
20843 case NM_PRECR_QB_PH
:
20845 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20846 gen_store_gpr(v1_t
, ret
);
20848 case NM_PRECRQ_QB_PH
:
20850 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20851 gen_store_gpr(v1_t
, ret
);
20853 case NM_PRECRQ_PH_W
:
20855 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20856 gen_store_gpr(v1_t
, ret
);
20858 case NM_PRECRQ_RS_PH_W
:
20860 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20861 gen_store_gpr(v1_t
, ret
);
20863 case NM_PRECRQU_S_QB_PH
:
20865 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20866 gen_store_gpr(v1_t
, ret
);
20870 tcg_gen_movi_tl(t0
, rd
);
20871 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20872 gen_store_gpr(v1_t
, rt
);
20876 tcg_gen_movi_tl(t0
, rd
>> 1);
20877 switch (extract32(ctx
->opcode
, 10, 1)) {
20880 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20881 gen_store_gpr(v1_t
, rt
);
20885 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20886 gen_store_gpr(v1_t
, rt
);
20892 tcg_gen_movi_tl(t0
, rd
>> 1);
20893 switch (extract32(ctx
->opcode
, 10, 2)) {
20896 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20897 gen_store_gpr(v1_t
, rt
);
20901 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20902 gen_store_gpr(v1_t
, rt
);
20905 generate_exception_end(ctx
, EXCP_RI
);
20911 tcg_gen_movi_tl(t0
, rd
);
20912 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20913 gen_store_gpr(v1_t
, rt
);
20919 imm
= sextract32(ctx
->opcode
, 11, 11);
20920 imm
= (int16_t)(imm
<< 6) >> 6;
20922 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20927 generate_exception_end(ctx
, EXCP_RI
);
20932 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20940 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20941 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20943 rt
= extract32(ctx
->opcode
, 21, 5);
20944 rs
= extract32(ctx
->opcode
, 16, 5);
20945 rd
= extract32(ctx
->opcode
, 11, 5);
20947 op
= extract32(ctx
->opcode
, 26, 6);
20952 switch (extract32(ctx
->opcode
, 19, 2)) {
20955 generate_exception_end(ctx
, EXCP_RI
);
20958 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20959 generate_exception_end(ctx
, EXCP_SYSCALL
);
20961 generate_exception_end(ctx
, EXCP_RI
);
20965 generate_exception_end(ctx
, EXCP_BREAK
);
20968 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20969 gen_helper_do_semihosting(cpu_env
);
20971 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20972 generate_exception_end(ctx
, EXCP_RI
);
20974 generate_exception_end(ctx
, EXCP_DBp
);
20981 imm
= extract32(ctx
->opcode
, 0, 16);
20983 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20985 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20987 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20992 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20993 extract32(ctx
->opcode
, 1, 20) << 1;
20994 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20995 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20999 switch (ctx
->opcode
& 0x07) {
21001 gen_pool32a0_nanomips_insn(env
, ctx
);
21005 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21006 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21010 switch (extract32(ctx
->opcode
, 3, 3)) {
21012 gen_p_lsx(ctx
, rd
, rs
, rt
);
21015 /* In nanoMIPS, the shift field directly encodes the shift
21016 * amount, meaning that the supported shift values are in
21017 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
21018 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
21019 extract32(ctx
->opcode
, 9, 2) - 1);
21022 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21025 gen_pool32axf_nanomips_insn(env
, ctx
);
21028 generate_exception_end(ctx
, EXCP_RI
);
21033 generate_exception_end(ctx
, EXCP_RI
);
21038 switch (ctx
->opcode
& 0x03) {
21041 offset
= extract32(ctx
->opcode
, 0, 21);
21042 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21046 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21049 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21052 generate_exception_end(ctx
, EXCP_RI
);
21058 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21059 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21060 switch (extract32(ctx
->opcode
, 16, 5)) {
21064 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21070 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21071 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21077 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21083 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21086 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21093 t0
= tcg_temp_new();
21095 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21098 tcg_gen_movi_tl(t0
, addr
);
21099 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21107 t0
= tcg_temp_new();
21108 t1
= tcg_temp_new();
21110 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21113 tcg_gen_movi_tl(t0
, addr
);
21114 gen_load_gpr(t1
, rt
);
21116 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21123 generate_exception_end(ctx
, EXCP_RI
);
21129 switch (extract32(ctx
->opcode
, 12, 4)) {
21131 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21134 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21137 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21140 switch (extract32(ctx
->opcode
, 20, 1)) {
21142 switch (ctx
->opcode
& 3) {
21144 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21145 extract32(ctx
->opcode
, 2, 1),
21146 extract32(ctx
->opcode
, 3, 9) << 3);
21149 case NM_RESTORE_JRC
:
21150 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21151 extract32(ctx
->opcode
, 2, 1),
21152 extract32(ctx
->opcode
, 3, 9) << 3);
21153 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21154 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21158 generate_exception_end(ctx
, EXCP_RI
);
21163 generate_exception_end(ctx
, EXCP_RI
);
21168 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21171 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21175 TCGv t0
= tcg_temp_new();
21177 imm
= extract32(ctx
->opcode
, 0, 12);
21178 gen_load_gpr(t0
, rs
);
21179 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21180 gen_store_gpr(t0
, rt
);
21186 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21187 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21191 int shift
= extract32(ctx
->opcode
, 0, 5);
21192 switch (extract32(ctx
->opcode
, 5, 4)) {
21194 if (rt
== 0 && shift
== 0) {
21196 } else if (rt
== 0 && shift
== 3) {
21197 /* EHB - treat as NOP */
21198 } else if (rt
== 0 && shift
== 5) {
21199 /* PAUSE - treat as NOP */
21200 } else if (rt
== 0 && shift
== 6) {
21202 gen_sync(extract32(ctx
->opcode
, 16, 5));
21205 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21206 extract32(ctx
->opcode
, 0, 5));
21210 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21211 extract32(ctx
->opcode
, 0, 5));
21214 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21215 extract32(ctx
->opcode
, 0, 5));
21218 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21219 extract32(ctx
->opcode
, 0, 5));
21227 TCGv t0
= tcg_temp_new();
21228 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21229 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21231 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21233 gen_load_gpr(t0
, rs
);
21234 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21237 tcg_temp_free_i32(shift
);
21238 tcg_temp_free_i32(shiftx
);
21239 tcg_temp_free_i32(stripe
);
21243 switch (((ctx
->opcode
>> 10) & 2) |
21244 (extract32(ctx
->opcode
, 5, 1))) {
21247 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21248 extract32(ctx
->opcode
, 6, 5));
21251 generate_exception_end(ctx
, EXCP_RI
);
21256 switch (((ctx
->opcode
>> 10) & 2) |
21257 (extract32(ctx
->opcode
, 5, 1))) {
21260 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21261 extract32(ctx
->opcode
, 6, 5));
21264 generate_exception_end(ctx
, EXCP_RI
);
21269 generate_exception_end(ctx
, EXCP_RI
);
21274 gen_pool32f_nanomips_insn(ctx
);
21279 switch (extract32(ctx
->opcode
, 1, 1)) {
21282 tcg_gen_movi_tl(cpu_gpr
[rt
],
21283 sextract32(ctx
->opcode
, 0, 1) << 31 |
21284 extract32(ctx
->opcode
, 2, 10) << 21 |
21285 extract32(ctx
->opcode
, 12, 9) << 12);
21290 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21291 extract32(ctx
->opcode
, 2, 10) << 21 |
21292 extract32(ctx
->opcode
, 12, 9) << 12;
21294 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21295 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
    case NM_P_GP_BH:
        {
            uint32_t u = extract32(ctx->opcode, 0, 18);

            switch (extract32(ctx->opcode, 18, 3)) {
            case NM_LBGP:
                gen_ld(ctx, OPC_LB, rt, 28, u);
                break;
            case NM_SBGP:
                gen_st(ctx, OPC_SB, rt, 28, u);
                break;
            case NM_LBUGP:
                gen_ld(ctx, OPC_LBU, rt, 28, u);
                break;
            case NM_ADDIUGP_B:
                if (rt != 0) {
                    gen_op_addr_addi(ctx, cpu_gpr[rt], cpu_gpr[28], u);
                }
                break;
            case NM_P_GP_LH:
                u &= ~1;
                switch (ctx->opcode & 1) {
                case NM_LHGP:
                    gen_ld(ctx, OPC_LH, rt, 28, u);
                    break;
                case NM_LHUGP:
                    gen_ld(ctx, OPC_LHU, rt, 28, u);
                    break;
                }
                break;
            case NM_P_GP_SH:
                u &= ~1;
                switch (ctx->opcode & 1) {
                case NM_SHGP:
                    gen_st(ctx, OPC_SH, rt, 28, u);
                    break;
                default:
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                break;
            case NM_P_GP_CP1:
                u &= ~0x3;
                switch (ctx->opcode & 0x3) {
                case NM_LWC1GP:
                    gen_cop1_ldst(ctx, OPC_LWC1, rt, 28, u);
                    break;
                case NM_LDC1GP:
                    gen_cop1_ldst(ctx, OPC_LDC1, rt, 28, u);
                    break;
                case NM_SWC1GP:
                    gen_cop1_ldst(ctx, OPC_SWC1, rt, 28, u);
                    break;
                case NM_SDC1GP:
                    gen_cop1_ldst(ctx, OPC_SDC1, rt, 28, u);
                    break;
                }
                break;
            default:
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        break;
    case NM_P_LS_U12:
        {
            uint32_t u = extract32(ctx->opcode, 0, 12);

            switch (extract32(ctx->opcode, 12, 4)) {
            case NM_P_PREFU12:
                if (rt == 31) {
                    /* SYNCI */
                    /*
                     * Break the TB to be able to sync copied instructions
                     * immediately.
                     */
                    ctx->base.is_jmp = DISAS_STOP;
                } else {
                    /* PREF */
                    /* Treat as NOP. */
                }
                break;
            case NM_LB:
                gen_ld(ctx, OPC_LB, rt, rs, u);
                break;
            case NM_LH:
                gen_ld(ctx, OPC_LH, rt, rs, u);
                break;
            case NM_LW:
                gen_ld(ctx, OPC_LW, rt, rs, u);
                break;
            case NM_LBU:
                gen_ld(ctx, OPC_LBU, rt, rs, u);
                break;
            case NM_LHU:
                gen_ld(ctx, OPC_LHU, rt, rs, u);
                break;
            case NM_SB:
                gen_st(ctx, OPC_SB, rt, rs, u);
                break;
            case NM_SH:
                gen_st(ctx, OPC_SH, rt, rs, u);
                break;
            case NM_SW:
                gen_st(ctx, OPC_SW, rt, rs, u);
                break;
            case NM_LWC1:
                gen_cop1_ldst(ctx, OPC_LWC1, rt, rs, u);
                break;
            case NM_LDC1:
                gen_cop1_ldst(ctx, OPC_LDC1, rt, rs, u);
                break;
            case NM_SWC1:
                gen_cop1_ldst(ctx, OPC_SWC1, rt, rs, u);
                break;
            case NM_SDC1:
                gen_cop1_ldst(ctx, OPC_SDC1, rt, rs, u);
                break;
            default:
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        break;
21424 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21425 extract32(ctx
->opcode
, 0, 8);
21427 switch (extract32(ctx
->opcode
, 8, 3)) {
21429 switch (extract32(ctx
->opcode
, 11, 4)) {
21431 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21434 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21437 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21440 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21443 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21446 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21449 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21452 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21455 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21458 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21461 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21464 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21469 /* Break the TB to be able to sync copied instructions
21471 ctx
->base
.is_jmp
= DISAS_STOP
;
21474 /* Treat as NOP. */
21478 generate_exception_end(ctx
, EXCP_RI
);
21483 switch (extract32(ctx
->opcode
, 11, 4)) {
21488 TCGv t0
= tcg_temp_new();
21489 TCGv t1
= tcg_temp_new();
21491 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21493 switch (extract32(ctx
->opcode
, 11, 4)) {
21495 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21497 gen_store_gpr(t0
, rt
);
21500 gen_load_gpr(t1
, rt
);
21501 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21510 switch (ctx
->opcode
& 0x03) {
21512 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21516 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21521 switch (ctx
->opcode
& 0x03) {
21523 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21527 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21533 check_cp0_enabled(ctx
);
21534 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21535 gen_cache_operation(ctx
, rt
, rs
, s
);
21541 switch (extract32(ctx
->opcode
, 11, 4)) {
21544 check_cp0_enabled(ctx
);
21545 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21549 check_cp0_enabled(ctx
);
21550 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21554 check_cp0_enabled(ctx
);
21555 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21559 /* case NM_SYNCIE */
21561 check_cp0_enabled(ctx
);
21562 /* Break the TB to be able to sync copied instructions
21564 ctx
->base
.is_jmp
= DISAS_STOP
;
21566 /* case NM_PREFE */
21568 check_cp0_enabled(ctx
);
21569 /* Treat as NOP. */
21574 check_cp0_enabled(ctx
);
21575 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21579 check_cp0_enabled(ctx
);
21580 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21584 check_cp0_enabled(ctx
);
21585 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21588 check_nms_dl_il_sl_tl_l2c(ctx
);
21589 gen_cache_operation(ctx
, rt
, rs
, s
);
21593 check_cp0_enabled(ctx
);
21594 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21598 check_cp0_enabled(ctx
);
21599 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21602 switch (extract32(ctx
->opcode
, 2, 2)) {
21606 check_cp0_enabled(ctx
);
21607 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21612 check_cp0_enabled(ctx
);
21613 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21616 generate_exception_end(ctx
, EXCP_RI
);
21621 switch (extract32(ctx
->opcode
, 2, 2)) {
21625 check_cp0_enabled(ctx
);
21626 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21631 check_cp0_enabled(ctx
);
21632 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21636 generate_exception_end(ctx
, EXCP_RI
);
21646 int count
= extract32(ctx
->opcode
, 12, 3);
21649 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21650 extract32(ctx
->opcode
, 0, 8);
21651 TCGv va
= tcg_temp_new();
21652 TCGv t1
= tcg_temp_new();
21653 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21654 NM_P_LS_UAWM
? MO_UNALN
: 0;
21656 count
= (count
== 0) ? 8 : count
;
21657 while (counter
!= count
) {
21658 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21659 int this_offset
= offset
+ (counter
<< 2);
21661 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21663 switch (extract32(ctx
->opcode
, 11, 1)) {
21665 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21667 gen_store_gpr(t1
, this_rt
);
21668 if ((this_rt
== rs
) &&
21669 (counter
!= (count
- 1))) {
21670 /* UNPREDICTABLE */
21674 this_rt
= (rt
== 0) ? 0 : this_rt
;
21675 gen_load_gpr(t1
, this_rt
);
21676 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21687 generate_exception_end(ctx
, EXCP_RI
);
21695 TCGv t0
= tcg_temp_new();
21696 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21697 extract32(ctx
->opcode
, 1, 20) << 1;
21698 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21699 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21700 extract32(ctx
->opcode
, 21, 3));
21701 gen_load_gpr(t0
, rt
);
21702 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21703 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21709 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21710 extract32(ctx
->opcode
, 1, 24) << 1;
21712 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21714 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21717 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21722 switch (extract32(ctx
->opcode
, 12, 4)) {
21725 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21728 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21731 generate_exception_end(ctx
, EXCP_RI
);
21737 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21738 extract32(ctx
->opcode
, 1, 13) << 1;
21739 switch (extract32(ctx
->opcode
, 14, 2)) {
21742 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21745 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21746 extract32(ctx
->opcode
, 1, 13) << 1;
21747 check_cp1_enabled(ctx
);
21748 switch (extract32(ctx
->opcode
, 16, 5)) {
21750 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21753 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21758 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21759 extract32(ctx
->opcode
, 0, 1) << 13;
21761 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21766 generate_exception_end(ctx
, EXCP_RI
);
21772 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21774 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21778 if (rs
== rt
|| rt
== 0) {
21779 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21780 } else if (rs
== 0) {
21781 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21783 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21791 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21792 extract32(ctx
->opcode
, 1, 13) << 1;
21793 switch (extract32(ctx
->opcode
, 14, 2)) {
21796 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21799 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21801 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21803 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21807 if (rs
== 0 || rs
== rt
) {
21809 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21811 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21815 generate_exception_end(ctx
, EXCP_RI
);
21822 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21823 extract32(ctx
->opcode
, 1, 10) << 1;
21824 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21826 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21831 generate_exception_end(ctx
, EXCP_RI
);
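/*
 * Decode one nanoMIPS instruction.  The 16-bit formats are handled
 * directly here; everything else is forwarded to
 * decode_nanomips_32_48_opc().  The return value is the size of the
 * decoded instruction in bytes.
 */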
static int decode_nanomips_opc(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;
    int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx->opcode));
    int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx->opcode));
    int rd = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx->opcode));
    int offset;
    int imm;

    /* make sure instructions are on a halfword boundary */
    if (ctx->base.pc_next & 0x1) {
        TCGv tmp = tcg_const_tl(ctx->base.pc_next);
        tcg_gen_st_tl(tmp, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
        tcg_temp_free(tmp);
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = extract32(ctx->opcode, 10, 6);
    switch (op) {
    case NM_P16_MV:
        rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
        if (rt != 0) {
            /* MOVE */
            rs = NANOMIPS_EXTRACT_RS5(ctx->opcode);
            gen_arith(ctx, OPC_ADDU, rt, rs, 0);
        } else {
            /* P16.RI */
21865 switch (extract32(ctx
->opcode
, 3, 2)) {
21866 case NM_P16_SYSCALL
:
21867 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21868 generate_exception_end(ctx
, EXCP_SYSCALL
);
21870 generate_exception_end(ctx
, EXCP_RI
);
21874 generate_exception_end(ctx
, EXCP_BREAK
);
21877 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21878 gen_helper_do_semihosting(cpu_env
);
21880 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21881 generate_exception_end(ctx
, EXCP_RI
);
21883 generate_exception_end(ctx
, EXCP_DBp
);
21888 generate_exception_end(ctx
, EXCP_RI
);
21895 int shift
= extract32(ctx
->opcode
, 0, 3);
21897 shift
= (shift
== 0) ? 8 : shift
;
21899 switch (extract32(ctx
->opcode
, 3, 1)) {
21907 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21911 switch (ctx
->opcode
& 1) {
21913 gen_pool16c_nanomips_insn(ctx
);
21916 gen_ldxs(ctx
, rt
, rs
, rd
);
21921 switch (extract32(ctx
->opcode
, 6, 1)) {
21923 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21924 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21927 generate_exception_end(ctx
, EXCP_RI
);
21932 switch (extract32(ctx
->opcode
, 3, 1)) {
21934 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21935 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21937 case NM_P_ADDIURS5
:
21938 rt
= extract32(ctx
->opcode
, 5, 5);
21940 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21941 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21942 (extract32(ctx
->opcode
, 0, 3));
21943 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21949 switch (ctx
->opcode
& 0x1) {
21951 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21954 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21959 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21960 extract32(ctx
->opcode
, 5, 3);
21961 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21962 extract32(ctx
->opcode
, 0, 3);
21963 rt
= decode_gpr_gpr4(rt
);
21964 rs
= decode_gpr_gpr4(rs
);
21965 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21966 (extract32(ctx
->opcode
, 3, 1))) {
21969 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21973 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21976 generate_exception_end(ctx
, EXCP_RI
);
21982 int imm
= extract32(ctx
->opcode
, 0, 7);
21983 imm
= (imm
== 0x7f ? -1 : imm
);
21985 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21991 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21992 u
= (u
== 12) ? 0xff :
21993 (u
== 13) ? 0xffff : u
;
21994 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21998 offset
= extract32(ctx
->opcode
, 0, 2);
21999 switch (extract32(ctx
->opcode
, 2, 2)) {
22001 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22004 rt
= decode_gpr_gpr3_src_store(
22005 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22006 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22009 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22012 generate_exception_end(ctx
, EXCP_RI
);
22017 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22018 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22020 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22023 rt
= decode_gpr_gpr3_src_store(
22024 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22025 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22028 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22031 generate_exception_end(ctx
, EXCP_RI
);
22036 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22037 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22040 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22041 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22042 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22046 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22047 extract32(ctx
->opcode
, 5, 3);
22048 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22049 extract32(ctx
->opcode
, 0, 3);
22050 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22051 (extract32(ctx
->opcode
, 8, 1) << 2);
22052 rt
= decode_gpr_gpr4(rt
);
22053 rs
= decode_gpr_gpr4(rs
);
22054 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22058 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22059 extract32(ctx
->opcode
, 5, 3);
22060 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22061 extract32(ctx
->opcode
, 0, 3);
22062 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22063 (extract32(ctx
->opcode
, 8, 1) << 2);
22064 rt
= decode_gpr_gpr4_zero(rt
);
22065 rs
= decode_gpr_gpr4(rs
);
22066 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22069 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22070 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22073 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22074 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22075 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22078 rt
= decode_gpr_gpr3_src_store(
22079 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22080 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22081 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22082 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22085 rt
= decode_gpr_gpr3_src_store(
22086 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22087 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22088 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22091 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22092 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22093 (extract32(ctx
->opcode
, 1, 9) << 1));
22096 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22097 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22098 (extract32(ctx
->opcode
, 1, 9) << 1));
22101 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22102 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22103 (extract32(ctx
->opcode
, 1, 6) << 1));
22106 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22107 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22108 (extract32(ctx
->opcode
, 1, 6) << 1));
22111 switch (ctx
->opcode
& 0xf) {
22114 switch (extract32(ctx
->opcode
, 4, 1)) {
22116 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22117 extract32(ctx
->opcode
, 5, 5), 0, 0);
22120 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22121 extract32(ctx
->opcode
, 5, 5), 31, 0);
22128 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22129 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22130 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22131 extract32(ctx
->opcode
, 0, 4) << 1);
22138 int count
= extract32(ctx
->opcode
, 0, 4);
22139 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22141 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22142 switch (extract32(ctx
->opcode
, 8, 1)) {
22144 gen_save(ctx
, rt
, count
, 0, u
);
22146 case NM_RESTORE_JRC16
:
22147 gen_restore(ctx
, rt
, count
, 0, u
);
22148 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22157 static const int gpr2reg1
[] = {4, 5, 6, 7};
22158 static const int gpr2reg2
[] = {5, 6, 7, 8};
22160 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22161 extract32(ctx
->opcode
, 8, 1);
22162 int r1
= gpr2reg1
[rd2
];
22163 int r2
= gpr2reg2
[rd2
];
22164 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22165 extract32(ctx
->opcode
, 0, 3);
22166 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22167 extract32(ctx
->opcode
, 5, 3);
22168 TCGv t0
= tcg_temp_new();
22169 TCGv t1
= tcg_temp_new();
22170 if (op
== NM_MOVEP
) {
22173 rs
= decode_gpr_gpr4_zero(r3
);
22174 rt
= decode_gpr_gpr4_zero(r4
);
22176 rd
= decode_gpr_gpr4(r3
);
22177 re
= decode_gpr_gpr4(r4
);
22181 gen_load_gpr(t0
, rs
);
22182 gen_load_gpr(t1
, rt
);
22183 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22184 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22190 return decode_nanomips_32_48_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

#endif

/* MIPSDSP functions. */
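
/*
 * Indexed DSP load: compute the effective address from the base and
 * index registers (either of which may be $zero) and load a byte,
 * halfword, word or (on MIPS64) doubleword into rd.
 */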
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
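
/*
 * DSP arithmetic: dispatch on op1/op2 to the add, subtract, absolute
 * value and precision-conversion helpers, writing the result to
 * GPR[ret].  A zero destination register is treated as a NOP.
 */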
22245 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22246 int ret
, int v1
, int v2
)
22252 /* Treat as NOP. */
22256 v1_t
= tcg_temp_new();
22257 v2_t
= tcg_temp_new();
22259 gen_load_gpr(v1_t
, v1
);
22260 gen_load_gpr(v2_t
, v2
);
22263 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22264 case OPC_MULT_G_2E
:
22268 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22270 case OPC_ADDUH_R_QB
:
22271 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22274 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22276 case OPC_ADDQH_R_PH
:
22277 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22280 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22282 case OPC_ADDQH_R_W
:
22283 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22286 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22288 case OPC_SUBUH_R_QB
:
22289 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22292 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22294 case OPC_SUBQH_R_PH
:
22295 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22298 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22300 case OPC_SUBQH_R_W
:
22301 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22305 case OPC_ABSQ_S_PH_DSP
:
22307 case OPC_ABSQ_S_QB
:
22309 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22311 case OPC_ABSQ_S_PH
:
22313 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22317 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22319 case OPC_PRECEQ_W_PHL
:
22321 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22322 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22324 case OPC_PRECEQ_W_PHR
:
22326 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22327 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22328 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22330 case OPC_PRECEQU_PH_QBL
:
22332 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22334 case OPC_PRECEQU_PH_QBR
:
22336 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22338 case OPC_PRECEQU_PH_QBLA
:
22340 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22342 case OPC_PRECEQU_PH_QBRA
:
22344 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22346 case OPC_PRECEU_PH_QBL
:
22348 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22350 case OPC_PRECEU_PH_QBR
:
22352 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22354 case OPC_PRECEU_PH_QBLA
:
22356 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22358 case OPC_PRECEU_PH_QBRA
:
22360 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22364 case OPC_ADDU_QB_DSP
:
22368 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22370 case OPC_ADDQ_S_PH
:
22372 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22376 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22380 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22382 case OPC_ADDU_S_QB
:
22384 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22388 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22390 case OPC_ADDU_S_PH
:
22392 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22396 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22398 case OPC_SUBQ_S_PH
:
22400 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22404 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22408 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22410 case OPC_SUBU_S_QB
:
22412 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22416 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22418 case OPC_SUBU_S_PH
:
22420 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22424 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22428 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22432 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22434 case OPC_RADDU_W_QB
:
22436 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22440 case OPC_CMPU_EQ_QB_DSP
:
22442 case OPC_PRECR_QB_PH
:
22444 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22446 case OPC_PRECRQ_QB_PH
:
22448 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22450 case OPC_PRECR_SRA_PH_W
:
22453 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22454 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22456 tcg_temp_free_i32(sa_t
);
22459 case OPC_PRECR_SRA_R_PH_W
:
22462 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22463 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22465 tcg_temp_free_i32(sa_t
);
22468 case OPC_PRECRQ_PH_W
:
22470 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22472 case OPC_PRECRQ_RS_PH_W
:
22474 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22476 case OPC_PRECRQU_S_QB_PH
:
22478 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22482 #ifdef TARGET_MIPS64
22483 case OPC_ABSQ_S_QH_DSP
:
22485 case OPC_PRECEQ_L_PWL
:
22487 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22489 case OPC_PRECEQ_L_PWR
:
22491 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22493 case OPC_PRECEQ_PW_QHL
:
22495 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22497 case OPC_PRECEQ_PW_QHR
:
22499 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22501 case OPC_PRECEQ_PW_QHLA
:
22503 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22505 case OPC_PRECEQ_PW_QHRA
:
22507 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22509 case OPC_PRECEQU_QH_OBL
:
22511 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22513 case OPC_PRECEQU_QH_OBR
:
22515 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22517 case OPC_PRECEQU_QH_OBLA
:
22519 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22521 case OPC_PRECEQU_QH_OBRA
:
22523 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22525 case OPC_PRECEU_QH_OBL
:
22527 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22529 case OPC_PRECEU_QH_OBR
:
22531 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22533 case OPC_PRECEU_QH_OBLA
:
22535 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22537 case OPC_PRECEU_QH_OBRA
:
22539 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22541 case OPC_ABSQ_S_OB
:
22543 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22545 case OPC_ABSQ_S_PW
:
22547 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22549 case OPC_ABSQ_S_QH
:
22551 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22555 case OPC_ADDU_OB_DSP
:
22557 case OPC_RADDU_L_OB
:
22559 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22563 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22565 case OPC_SUBQ_S_PW
:
22567 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22571 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22573 case OPC_SUBQ_S_QH
:
22575 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22579 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22581 case OPC_SUBU_S_OB
:
22583 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22587 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22589 case OPC_SUBU_S_QH
:
22591 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22595 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22597 case OPC_SUBUH_R_OB
:
22599 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22603 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22605 case OPC_ADDQ_S_PW
:
22607 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22611 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22613 case OPC_ADDQ_S_QH
:
22615 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22619 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22621 case OPC_ADDU_S_OB
:
22623 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22627 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22629 case OPC_ADDU_S_QH
:
22631 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22635 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22637 case OPC_ADDUH_R_OB
:
22639 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22643 case OPC_CMPU_EQ_OB_DSP
:
22645 case OPC_PRECR_OB_QH
:
22647 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22649 case OPC_PRECR_SRA_QH_PW
:
22652 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22653 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22654 tcg_temp_free_i32(ret_t
);
22657 case OPC_PRECR_SRA_R_QH_PW
:
22660 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22661 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22662 tcg_temp_free_i32(sa_v
);
22665 case OPC_PRECRQ_OB_QH
:
22667 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22669 case OPC_PRECRQ_PW_L
:
22671 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22673 case OPC_PRECRQ_QH_PW
:
22675 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22677 case OPC_PRECRQ_RS_QH_PW
:
22679 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22681 case OPC_PRECRQU_S_OB_QH
:
22683 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22690 tcg_temp_free(v1_t
);
22691 tcg_temp_free(v2_t
);
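
/*
 * DSP shifts.  The immediate shift amount is placed in t0, while the
 * -V (variable) forms take the shift amount from the first register
 * operand; the helpers receive whichever of the two applies.
 */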
22694 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22695 int ret
, int v1
, int v2
)
22703 /* Treat as NOP. */
22707 t0
= tcg_temp_new();
22708 v1_t
= tcg_temp_new();
22709 v2_t
= tcg_temp_new();
22711 tcg_gen_movi_tl(t0
, v1
);
22712 gen_load_gpr(v1_t
, v1
);
22713 gen_load_gpr(v2_t
, v2
);
22716 case OPC_SHLL_QB_DSP
:
22718 op2
= MASK_SHLL_QB(ctx
->opcode
);
22722 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22726 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22730 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22734 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22736 case OPC_SHLL_S_PH
:
22738 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22740 case OPC_SHLLV_S_PH
:
22742 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22746 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22748 case OPC_SHLLV_S_W
:
22750 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22754 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22758 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22762 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22766 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22770 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22772 case OPC_SHRA_R_QB
:
22774 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22778 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22780 case OPC_SHRAV_R_QB
:
22782 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22786 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22788 case OPC_SHRA_R_PH
:
22790 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22794 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22796 case OPC_SHRAV_R_PH
:
22798 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22802 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22804 case OPC_SHRAV_R_W
:
22806 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22808 default: /* Invalid */
22809 MIPS_INVAL("MASK SHLL.QB");
22810 generate_exception_end(ctx
, EXCP_RI
);
22815 #ifdef TARGET_MIPS64
22816 case OPC_SHLL_OB_DSP
:
22817 op2
= MASK_SHLL_OB(ctx
->opcode
);
22821 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22825 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22827 case OPC_SHLL_S_PW
:
22829 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22831 case OPC_SHLLV_S_PW
:
22833 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22837 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22841 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22845 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22849 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22851 case OPC_SHLL_S_QH
:
22853 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22855 case OPC_SHLLV_S_QH
:
22857 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22861 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22865 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22867 case OPC_SHRA_R_OB
:
22869 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22871 case OPC_SHRAV_R_OB
:
22873 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22877 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22881 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22883 case OPC_SHRA_R_PW
:
22885 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22887 case OPC_SHRAV_R_PW
:
22889 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22893 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22897 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22899 case OPC_SHRA_R_QH
:
22901 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22903 case OPC_SHRAV_R_QH
:
22905 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22909 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22913 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22917 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22921 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22923 default: /* Invalid */
22924 MIPS_INVAL("MASK SHLL.OB");
22925 generate_exception_end(ctx
, EXCP_RI
);
22933 tcg_temp_free(v1_t
);
22934 tcg_temp_free(v2_t
);
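
/*
 * DSP multiply and dot-product operations.  For the accumulator-based
 * forms the accumulator number (taken from the ret field) is passed to
 * the helper in t0; plain multiplies write GPR[ret] directly, and the
 * insn is treated as a NOP when ret is 0 and check_ret is set.
 */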
22937 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22938 int ret
, int v1
, int v2
, int check_ret
)
22944 if ((ret
== 0) && (check_ret
== 1)) {
22945 /* Treat as NOP. */
22949 t0
= tcg_temp_new_i32();
22950 v1_t
= tcg_temp_new();
22951 v2_t
= tcg_temp_new();
22953 tcg_gen_movi_i32(t0
, ret
);
22954 gen_load_gpr(v1_t
, v1
);
22955 gen_load_gpr(v2_t
, v2
);
22958 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22959 * the same mask and op1. */
22960 case OPC_MULT_G_2E
:
22964 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22967 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22970 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22972 case OPC_MULQ_RS_W
:
22973 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22977 case OPC_DPA_W_PH_DSP
:
22979 case OPC_DPAU_H_QBL
:
22981 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22983 case OPC_DPAU_H_QBR
:
22985 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22987 case OPC_DPSU_H_QBL
:
22989 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22991 case OPC_DPSU_H_QBR
:
22993 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22997 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22999 case OPC_DPAX_W_PH
:
23001 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23003 case OPC_DPAQ_S_W_PH
:
23005 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23007 case OPC_DPAQX_S_W_PH
:
23009 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23011 case OPC_DPAQX_SA_W_PH
:
23013 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23017 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23019 case OPC_DPSX_W_PH
:
23021 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23023 case OPC_DPSQ_S_W_PH
:
23025 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23027 case OPC_DPSQX_S_W_PH
:
23029 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23031 case OPC_DPSQX_SA_W_PH
:
23033 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23035 case OPC_MULSAQ_S_W_PH
:
23037 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23039 case OPC_DPAQ_SA_L_W
:
23041 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23043 case OPC_DPSQ_SA_L_W
:
23045 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23047 case OPC_MAQ_S_W_PHL
:
23049 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23051 case OPC_MAQ_S_W_PHR
:
23053 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23055 case OPC_MAQ_SA_W_PHL
:
23057 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23059 case OPC_MAQ_SA_W_PHR
:
23061 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23063 case OPC_MULSA_W_PH
:
23065 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23069 #ifdef TARGET_MIPS64
23070 case OPC_DPAQ_W_QH_DSP
:
23072 int ac
= ret
& 0x03;
23073 tcg_gen_movi_i32(t0
, ac
);
23078 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23082 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23086 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23090 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23094 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23096 case OPC_DPAQ_S_W_QH
:
23098 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23100 case OPC_DPAQ_SA_L_PW
:
23102 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23104 case OPC_DPAU_H_OBL
:
23106 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23108 case OPC_DPAU_H_OBR
:
23110 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23114 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23116 case OPC_DPSQ_S_W_QH
:
23118 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23120 case OPC_DPSQ_SA_L_PW
:
23122 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23124 case OPC_DPSU_H_OBL
:
23126 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23128 case OPC_DPSU_H_OBR
:
23130 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23132 case OPC_MAQ_S_L_PWL
:
23134 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23136 case OPC_MAQ_S_L_PWR
:
23138 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23140 case OPC_MAQ_S_W_QHLL
:
23142 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23144 case OPC_MAQ_SA_W_QHLL
:
23146 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23148 case OPC_MAQ_S_W_QHLR
:
23150 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23152 case OPC_MAQ_SA_W_QHLR
:
23154 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23156 case OPC_MAQ_S_W_QHRL
:
23158 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23160 case OPC_MAQ_SA_W_QHRL
:
23162 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23164 case OPC_MAQ_S_W_QHRR
:
23166 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23168 case OPC_MAQ_SA_W_QHRR
:
23170 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23172 case OPC_MULSAQ_S_L_PW
:
23174 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23176 case OPC_MULSAQ_S_W_QH
:
23178 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23184 case OPC_ADDU_QB_DSP
:
23186 case OPC_MULEU_S_PH_QBL
:
23188 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23190 case OPC_MULEU_S_PH_QBR
:
23192 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23194 case OPC_MULQ_RS_PH
:
23196 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23198 case OPC_MULEQ_S_W_PHL
:
23200 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23202 case OPC_MULEQ_S_W_PHR
:
23204 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23206 case OPC_MULQ_S_PH
:
23208 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23212 #ifdef TARGET_MIPS64
23213 case OPC_ADDU_OB_DSP
:
23215 case OPC_MULEQ_S_PW_QHL
:
23217 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23219 case OPC_MULEQ_S_PW_QHR
:
23221 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23223 case OPC_MULEU_S_QH_OBL
:
23225 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23227 case OPC_MULEU_S_QH_OBR
:
23229 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23231 case OPC_MULQ_RS_QH
:
23233 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23240 tcg_temp_free_i32(t0
);
23241 tcg_temp_free(v1_t
);
23242 tcg_temp_free(v2_t
);
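
/*
 * DSP bit/field operations: BITREV plus the REPL/REPLV replication
 * forms, which build a value whose byte or halfword (and, on MIPS64,
 * word) elements are copies of the immediate or register operand and
 * store it into GPR[ret].
 */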
23245 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23253 /* Treat as NOP. */
23257 t0
= tcg_temp_new();
23258 val_t
= tcg_temp_new();
23259 gen_load_gpr(val_t
, val
);
23262 case OPC_ABSQ_S_PH_DSP
:
23266 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23271 target_long result
;
23272 imm
= (ctx
->opcode
>> 16) & 0xFF;
23273 result
= (uint32_t)imm
<< 24 |
23274 (uint32_t)imm
<< 16 |
23275 (uint32_t)imm
<< 8 |
23277 result
= (int32_t)result
;
23278 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23283 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23284 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23285 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23286 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23287 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23288 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23293 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23294 imm
= (int16_t)(imm
<< 6) >> 6;
23295 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23296 (target_long
)((int32_t)imm
<< 16 | \
23302 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23303 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23304 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23305 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23309 #ifdef TARGET_MIPS64
23310 case OPC_ABSQ_S_QH_DSP
:
23317 imm
= (ctx
->opcode
>> 16) & 0xFF;
23318 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23319 temp
= (temp
<< 16) | temp
;
23320 temp
= (temp
<< 32) | temp
;
23321 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23329 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23330 imm
= (int16_t)(imm
<< 6) >> 6;
23331 temp
= ((target_long
)imm
<< 32) \
23332 | ((target_long
)imm
& 0xFFFFFFFF);
23333 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23341 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23342 imm
= (int16_t)(imm
<< 6) >> 6;
23344 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23345 ((uint64_t)(uint16_t)imm
<< 32) |
23346 ((uint64_t)(uint16_t)imm
<< 16) |
23347 (uint64_t)(uint16_t)imm
;
23348 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23353 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23354 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23355 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23356 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23357 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23358 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23359 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23363 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23364 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23365 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23369 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23370 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23371 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23372 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23373 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23380 tcg_temp_free(val_t
);
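
/*
 * DSP compare and pick operations.  The CMPU/CMP variants only update
 * the DSP control condition bits; the CMPGU, CMPGDU, PICK and PACKRL
 * variants also (or instead) write GPR[ret].
 */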
23383 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23384 uint32_t op1
, uint32_t op2
,
23385 int ret
, int v1
, int v2
, int check_ret
)
23391 if ((ret
== 0) && (check_ret
== 1)) {
23392 /* Treat as NOP. */
23396 t1
= tcg_temp_new();
23397 v1_t
= tcg_temp_new();
23398 v2_t
= tcg_temp_new();
23400 gen_load_gpr(v1_t
, v1
);
23401 gen_load_gpr(v2_t
, v2
);
23404 case OPC_CMPU_EQ_QB_DSP
:
23406 case OPC_CMPU_EQ_QB
:
23408 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23410 case OPC_CMPU_LT_QB
:
23412 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23414 case OPC_CMPU_LE_QB
:
23416 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23418 case OPC_CMPGU_EQ_QB
:
23420 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23422 case OPC_CMPGU_LT_QB
:
23424 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23426 case OPC_CMPGU_LE_QB
:
23428 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23430 case OPC_CMPGDU_EQ_QB
:
23432 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23433 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23434 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23435 tcg_gen_shli_tl(t1
, t1
, 24);
23436 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23438 case OPC_CMPGDU_LT_QB
:
23440 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23441 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23442 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23443 tcg_gen_shli_tl(t1
, t1
, 24);
23444 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23446 case OPC_CMPGDU_LE_QB
:
23448 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23449 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23450 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23451 tcg_gen_shli_tl(t1
, t1
, 24);
23452 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23454 case OPC_CMP_EQ_PH
:
23456 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23458 case OPC_CMP_LT_PH
:
23460 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23462 case OPC_CMP_LE_PH
:
23464 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23468 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23472 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23474 case OPC_PACKRL_PH
:
23476 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23480 #ifdef TARGET_MIPS64
23481 case OPC_CMPU_EQ_OB_DSP
:
23483 case OPC_CMP_EQ_PW
:
23485 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23487 case OPC_CMP_LT_PW
:
23489 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23491 case OPC_CMP_LE_PW
:
23493 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23495 case OPC_CMP_EQ_QH
:
23497 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23499 case OPC_CMP_LT_QH
:
23501 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23503 case OPC_CMP_LE_QH
:
23505 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23507 case OPC_CMPGDU_EQ_OB
:
23509 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23511 case OPC_CMPGDU_LT_OB
:
23513 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23515 case OPC_CMPGDU_LE_OB
:
23517 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23519 case OPC_CMPGU_EQ_OB
:
23521 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23523 case OPC_CMPGU_LT_OB
:
23525 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23527 case OPC_CMPGU_LE_OB
:
23529 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23531 case OPC_CMPU_EQ_OB
:
23533 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23535 case OPC_CMPU_LT_OB
:
23537 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23539 case OPC_CMPU_LE_OB
:
23541 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23543 case OPC_PACKRL_PW
:
23545 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23549 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23553 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23557 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23565 tcg_temp_free(v1_t
);
23566 tcg_temp_free(v2_t
);
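
/*
 * APPEND, PREPEND and BALIGN (and the MIPS64 DAPPEND forms): shift
 * GPR[rt] and merge in bits taken from GPR[rs] (loaded into t0)
 * according to the sa field.
 */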
23569 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23570 uint32_t op1
, int rt
, int rs
, int sa
)
23577 /* Treat as NOP. */
23581 t0
= tcg_temp_new();
23582 gen_load_gpr(t0
, rs
);
23585 case OPC_APPEND_DSP
:
23586 switch (MASK_APPEND(ctx
->opcode
)) {
23589 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23591 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23595 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23596 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23597 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23598 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23600 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23604 if (sa
!= 0 && sa
!= 2) {
23605 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23606 tcg_gen_ext32u_tl(t0
, t0
);
23607 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23608 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23610 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23612 default: /* Invalid */
23613 MIPS_INVAL("MASK APPEND");
23614 generate_exception_end(ctx
, EXCP_RI
);
23618 #ifdef TARGET_MIPS64
23619 case OPC_DAPPEND_DSP
:
23620 switch (MASK_DAPPEND(ctx
->opcode
)) {
23623 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23627 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23628 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23629 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23633 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23634 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23635 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23640 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23641 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23642 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23643 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23646 default: /* Invalid */
23647 MIPS_INVAL("MASK DAPPEND");
23648 generate_exception_end(ctx
, EXCP_RI
);
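
/*
 * DSP accumulator access instructions (EXTR, EXTP, SHILO, MTHLIP,
 * WRDSP/RDDSP and their MIPS64 counterparts): thin wrappers around the
 * corresponding helpers, with the immediate operands first moved into
 * temporaries.
 */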
23657 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23658 int ret
, int v1
, int v2
, int check_ret
)
23667 if ((ret
== 0) && (check_ret
== 1)) {
23668 /* Treat as NOP. */
23672 t0
= tcg_temp_new();
23673 t1
= tcg_temp_new();
23674 v1_t
= tcg_temp_new();
23675 v2_t
= tcg_temp_new();
23677 gen_load_gpr(v1_t
, v1
);
23678 gen_load_gpr(v2_t
, v2
);
23681 case OPC_EXTR_W_DSP
:
23685 tcg_gen_movi_tl(t0
, v2
);
23686 tcg_gen_movi_tl(t1
, v1
);
23687 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23690 tcg_gen_movi_tl(t0
, v2
);
23691 tcg_gen_movi_tl(t1
, v1
);
23692 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23694 case OPC_EXTR_RS_W
:
23695 tcg_gen_movi_tl(t0
, v2
);
23696 tcg_gen_movi_tl(t1
, v1
);
23697 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23700 tcg_gen_movi_tl(t0
, v2
);
23701 tcg_gen_movi_tl(t1
, v1
);
23702 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23704 case OPC_EXTRV_S_H
:
23705 tcg_gen_movi_tl(t0
, v2
);
23706 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23709 tcg_gen_movi_tl(t0
, v2
);
23710 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23712 case OPC_EXTRV_R_W
:
23713 tcg_gen_movi_tl(t0
, v2
);
23714 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23716 case OPC_EXTRV_RS_W
:
23717 tcg_gen_movi_tl(t0
, v2
);
23718 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23721 tcg_gen_movi_tl(t0
, v2
);
23722 tcg_gen_movi_tl(t1
, v1
);
23723 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23726 tcg_gen_movi_tl(t0
, v2
);
23727 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23730 tcg_gen_movi_tl(t0
, v2
);
23731 tcg_gen_movi_tl(t1
, v1
);
23732 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23735 tcg_gen_movi_tl(t0
, v2
);
23736 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23739 imm
= (ctx
->opcode
>> 20) & 0x3F;
23740 tcg_gen_movi_tl(t0
, ret
);
23741 tcg_gen_movi_tl(t1
, imm
);
23742 gen_helper_shilo(t0
, t1
, cpu_env
);
23745 tcg_gen_movi_tl(t0
, ret
);
23746 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23749 tcg_gen_movi_tl(t0
, ret
);
23750 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23753 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23754 tcg_gen_movi_tl(t0
, imm
);
23755 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23758 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23759 tcg_gen_movi_tl(t0
, imm
);
23760 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23764 #ifdef TARGET_MIPS64
23765 case OPC_DEXTR_W_DSP
:
23769 tcg_gen_movi_tl(t0
, ret
);
23770 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23774 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23775 int ac
= (ctx
->opcode
>> 11) & 0x03;
23776 tcg_gen_movi_tl(t0
, shift
);
23777 tcg_gen_movi_tl(t1
, ac
);
23778 gen_helper_dshilo(t0
, t1
, cpu_env
);
23783 int ac
= (ctx
->opcode
>> 11) & 0x03;
23784 tcg_gen_movi_tl(t0
, ac
);
23785 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23789 tcg_gen_movi_tl(t0
, v2
);
23790 tcg_gen_movi_tl(t1
, v1
);
23792 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23795 tcg_gen_movi_tl(t0
, v2
);
23796 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23799 tcg_gen_movi_tl(t0
, v2
);
23800 tcg_gen_movi_tl(t1
, v1
);
23801 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23804 tcg_gen_movi_tl(t0
, v2
);
23805 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23808 tcg_gen_movi_tl(t0
, v2
);
23809 tcg_gen_movi_tl(t1
, v1
);
23810 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23812 case OPC_DEXTR_R_L
:
23813 tcg_gen_movi_tl(t0
, v2
);
23814 tcg_gen_movi_tl(t1
, v1
);
23815 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23817 case OPC_DEXTR_RS_L
:
23818 tcg_gen_movi_tl(t0
, v2
);
23819 tcg_gen_movi_tl(t1
, v1
);
23820 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23823 tcg_gen_movi_tl(t0
, v2
);
23824 tcg_gen_movi_tl(t1
, v1
);
23825 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23827 case OPC_DEXTR_R_W
:
23828 tcg_gen_movi_tl(t0
, v2
);
23829 tcg_gen_movi_tl(t1
, v1
);
23830 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23832 case OPC_DEXTR_RS_W
:
23833 tcg_gen_movi_tl(t0
, v2
);
23834 tcg_gen_movi_tl(t1
, v1
);
23835 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23837 case OPC_DEXTR_S_H
:
23838 tcg_gen_movi_tl(t0
, v2
);
23839 tcg_gen_movi_tl(t1
, v1
);
23840 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23842 case OPC_DEXTRV_S_H
:
23843 tcg_gen_movi_tl(t0
, v2
);
23844 tcg_gen_movi_tl(t1
, v1
);
23845 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23848 tcg_gen_movi_tl(t0
, v2
);
23849 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23851 case OPC_DEXTRV_R_L
:
23852 tcg_gen_movi_tl(t0
, v2
);
23853 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23855 case OPC_DEXTRV_RS_L
:
23856 tcg_gen_movi_tl(t0
, v2
);
23857 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23860 tcg_gen_movi_tl(t0
, v2
);
23861 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23863 case OPC_DEXTRV_R_W
:
23864 tcg_gen_movi_tl(t0
, v2
);
23865 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23867 case OPC_DEXTRV_RS_W
:
23868 tcg_gen_movi_tl(t0
, v2
);
23869 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}

/* End MIPSDSP functions. */
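
/* Decode the Release 6 view of the SPECIAL (major opcode 0) encodings. */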
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
23903 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23913 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23916 MIPS_INVAL("special_r6 muldiv");
23917 generate_exception_end(ctx
, EXCP_RI
);
23923 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23927 if (rt
== 0 && sa
== 1) {
23928 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23929 We need additionally to check other fields */
23930 gen_cl(ctx
, op1
, rd
, rs
);
23932 generate_exception_end(ctx
, EXCP_RI
);
23936 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23937 gen_helper_do_semihosting(cpu_env
);
23939 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23940 generate_exception_end(ctx
, EXCP_RI
);
23942 generate_exception_end(ctx
, EXCP_DBp
);
23946 #if defined(TARGET_MIPS64)
23948 check_mips_64(ctx
);
23949 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23953 if (rt
== 0 && sa
== 1) {
23954 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23955 We need additionally to check other fields */
23956 check_mips_64(ctx
);
23957 gen_cl(ctx
, op1
, rd
, rs
);
23959 generate_exception_end(ctx
, EXCP_RI
);
23967 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23977 check_mips_64(ctx
);
23978 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23981 MIPS_INVAL("special_r6 muldiv");
23982 generate_exception_end(ctx
, EXCP_RI
);
23987 default: /* Invalid */
23988 MIPS_INVAL("special_r6");
23989 generate_exception_end(ctx
, EXCP_RI
);
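
/* Decode the R5900 (TX79) view of the SPECIAL encodings. */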
static void decode_opc_special_tx79(CPUMIPSState *env, DisasContext *ctx)
{
    int rs = extract32(ctx->opcode, 21, 5);
    int rt = extract32(ctx->opcode, 16, 5);
    int rd = extract32(ctx->opcode, 11, 5);
    uint32_t op1 = MASK_SPECIAL(ctx->opcode);

    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, 0, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, 0, rs);
        break;
    case OPC_MULT:
    case OPC_MULTU:
        gen_mul_txx9(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT:
    case OPC_DMULTU:
    case OPC_DDIV:
    case OPC_DDIVU:
        check_insn_opc_user_only(ctx, INSN_R5900);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, 0, 0, 4);
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_tx79");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
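
/* Decode the pre-Release 6 SPECIAL encodings. */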
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
24059 case OPC_MFHI
: /* Move from HI/LO */
24061 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24064 case OPC_MTLO
: /* Move to HI/LO */
24065 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24068 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24069 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24070 check_cp1_enabled(ctx
);
24071 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24072 (ctx
->opcode
>> 16) & 1);
24074 generate_exception_err(ctx
, EXCP_CpU
, 1);
24080 check_insn(ctx
, INSN_VR54XX
);
24081 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24082 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24084 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24089 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24091 #if defined(TARGET_MIPS64)
24096 check_insn(ctx
, ISA_MIPS3
);
24097 check_mips_64(ctx
);
24098 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24102 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24105 #ifdef MIPS_STRICT_STANDARD
24106 MIPS_INVAL("SPIM");
24107 generate_exception_end(ctx
, EXCP_RI
);
24109 /* Implemented as RI exception for now. */
24110 MIPS_INVAL("spim (unofficial)");
24111 generate_exception_end(ctx
, EXCP_RI
);
24114 default: /* Invalid */
24115 MIPS_INVAL("special_legacy");
24116 generate_exception_end(ctx
, EXCP_RI
);
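
/*
 * Top-level decoder for the SPECIAL major opcode: the encodings common
 * to all ISA levels are handled here, and the rest are deferred to the
 * R6, TX79 or legacy sub-decoders.
 */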
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_SLL:          /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        /* Fallthrough */
    case OPC_SRA:
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_SRL:
24147 switch ((ctx
->opcode
>> 21) & 0x1f) {
24149 /* rotr is decoded as srl on non-R2 CPUs */
24150 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24155 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24158 generate_exception_end(ctx
, EXCP_RI
);
24166 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24168 case OPC_SLLV
: /* Shifts */
24170 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24173 switch ((ctx
->opcode
>> 6) & 0x1f) {
24175 /* rotrv is decoded as srlv on non-R2 CPUs */
24176 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24181 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24184 generate_exception_end(ctx
, EXCP_RI
);
24188 case OPC_SLT
: /* Set on less than */
24190 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24192 case OPC_AND
: /* Logic*/
24196 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24199 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24201 case OPC_TGE
: /* Traps */
24207 check_insn(ctx
, ISA_MIPS2
);
24208 gen_trap(ctx
, op1
, rs
, rt
, -1);
24210 case OPC_LSA
: /* OPC_PMON */
24211 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24212 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24213 decode_opc_special_r6(env
, ctx
);
24215 /* Pmon entry point, also R4010 selsl */
24216 #ifdef MIPS_STRICT_STANDARD
24217 MIPS_INVAL("PMON / selsl");
24218 generate_exception_end(ctx
, EXCP_RI
);
24220 gen_helper_0e0i(pmon
, sa
);
24225 generate_exception_end(ctx
, EXCP_SYSCALL
);
24228 generate_exception_end(ctx
, EXCP_BREAK
);
24231 check_insn(ctx
, ISA_MIPS2
);
24232 gen_sync(extract32(ctx
->opcode
, 6, 5));
24235 #if defined(TARGET_MIPS64)
24236 /* MIPS64 specific opcodes */
24241 check_insn(ctx
, ISA_MIPS3
);
24242 check_mips_64(ctx
);
24243 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24246 switch ((ctx
->opcode
>> 21) & 0x1f) {
24248 /* drotr is decoded as dsrl on non-R2 CPUs */
24249 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24254 check_insn(ctx
, ISA_MIPS3
);
24255 check_mips_64(ctx
);
24256 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24259 generate_exception_end(ctx
, EXCP_RI
);
24264 switch ((ctx
->opcode
>> 21) & 0x1f) {
24266 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24267 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24272 check_insn(ctx
, ISA_MIPS3
);
24273 check_mips_64(ctx
);
24274 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24277 generate_exception_end(ctx
, EXCP_RI
);
24285 check_insn(ctx
, ISA_MIPS3
);
24286 check_mips_64(ctx
);
24287 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24291 check_insn(ctx
, ISA_MIPS3
);
24292 check_mips_64(ctx
);
24293 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24296 switch ((ctx
->opcode
>> 6) & 0x1f) {
24298 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24299 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24304 check_insn(ctx
, ISA_MIPS3
);
24305 check_mips_64(ctx
);
24306 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24309 generate_exception_end(ctx
, EXCP_RI
);
24314 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24315 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24316 decode_opc_special_r6(env
, ctx
);
24321 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24322 decode_opc_special_r6(env
, ctx
);
24323 } else if (ctx
->insn_flags
& INSN_R5900
) {
24324 decode_opc_special_tx79(env
, ctx
);
24326 decode_opc_special_legacy(env
, ctx
);
#if defined(TARGET_MIPS64)

/*
 *           MMI (MultiMedia Interface) ASE instructions
 *           ===========================================
 */

/*
 *          MMI instructions category: data communication
 *          ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *   PCPYH    PEXCH    PEXTLB   PINTH    PPACB    PEXT5    PREVH
 *   PCPYLD   PEXCW    PEXTLH   PINTEH   PPACH    PPAC5    PROT3W
 *   PCPYUD   PEXEH    PEXTLW   PPACW
 */
#if !defined(TARGET_MIPS64)

/* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
#define MXU_APTN1_A    0
#define MXU_APTN1_S    1

/* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
#define MXU_APTN2_AA    0
#define MXU_APTN2_AS    1
#define MXU_APTN2_SA    2
#define MXU_APTN2_SS    3

/* MXU execute add/subtract 2-bit pattern 'eptn2' */
#define MXU_EPTN2_AA    0
#define MXU_EPTN2_AS    1
#define MXU_EPTN2_SA    2
#define MXU_EPTN2_SS    3

/* MXU operand getting pattern 'optn2' */
#define MXU_OPTN2_PTN0  0
#define MXU_OPTN2_PTN1  1
#define MXU_OPTN2_PTN2  2
#define MXU_OPTN2_PTN3  3
/* alternative naming scheme for 'optn2' */
#define MXU_OPTN2_WW    0
#define MXU_OPTN2_LW    1
#define MXU_OPTN2_HW    2
#define MXU_OPTN2_XW    3

/* MXU operand getting pattern 'optn3' */
#define MXU_OPTN3_PTN0  0
#define MXU_OPTN3_PTN1  1
#define MXU_OPTN3_PTN2  2
#define MXU_OPTN3_PTN3  3
#define MXU_OPTN3_PTN4  4
#define MXU_OPTN3_PTN5  5
#define MXU_OPTN3_PTN6  6
#define MXU_OPTN3_PTN7  7
/*
 * S32I2M XRa, rb - Register move from GRF to XRF
 */
static void gen_mxu_s32i2m(DisasContext *ctx)
    t0 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 5);
    Rb = extract32(ctx->opcode, 16, 5);

    gen_load_gpr(t0, Rb);
        gen_store_mxu_gpr(t0, XRa);
    } else if (XRa == 16) {
        gen_store_mxu_cr(t0);
/*
 * S32M2I XRa, rb - Register move from XRF to GRF
 */
static void gen_mxu_s32m2i(DisasContext *ctx)
    t0 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 5);
    Rb = extract32(ctx->opcode, 16, 5);

        gen_load_mxu_gpr(t0, XRa);
    } else if (XRa == 16) {
        gen_load_mxu_cr(t0);

    gen_store_gpr(t0, Rb);
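/*
 * Register indices in S32I2M/S32M2I above: values up to 15 address the MXU
 * general registers through gen_{store,load}_mxu_gpr(), while the value 16
 * addresses the MXU control register through gen_{store,load}_mxu_cr().
 */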
/*
 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
 */
static void gen_mxu_s8ldd(DisasContext *ctx)
    uint32_t XRa, Rb, s8, optn3;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    s8 = extract32(ctx->opcode, 10, 8);
    optn3 = extract32(ctx->opcode, 18, 3);
    Rb = extract32(ctx->opcode, 21, 5);

    gen_load_gpr(t0, Rb);
    tcg_gen_addi_tl(t0, t0, (int8_t)s8);

    switch (optn3) {
    /* XRa[7:0] = tmp8 */
    case MXU_OPTN3_PTN0:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 0, 8);
    /* XRa[15:8] = tmp8 */
    case MXU_OPTN3_PTN1:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 8, 8);
    /* XRa[23:16] = tmp8 */
    case MXU_OPTN3_PTN2:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 16, 8);
    /* XRa[31:24] = tmp8 */
    case MXU_OPTN3_PTN3:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        gen_load_mxu_gpr(t0, XRa);
        tcg_gen_deposit_tl(t0, t0, t1, 24, 8);
    /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
    case MXU_OPTN3_PTN4:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);
    /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
    case MXU_OPTN3_PTN5:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_shli_tl(t1, t1, 8);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);
    /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
    case MXU_OPTN3_PTN6:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_SB);
        tcg_gen_mov_tl(t0, t1);
        tcg_gen_andi_tl(t0, t0, 0xFF00FFFF);
        tcg_gen_shli_tl(t1, t1, 16);
        tcg_gen_or_tl(t0, t0, t1);
    /* XRa = {tmp8, tmp8, tmp8, tmp8} */
    case MXU_OPTN3_PTN7:
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
        tcg_gen_deposit_tl(t1, t1, t1, 8, 8);
        tcg_gen_deposit_tl(t0, t1, t1, 16, 16);

    gen_store_mxu_gpr(t0, XRa);
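/*
 * Worked example for the PTN6 pattern above: if the byte loaded from memory
 * is 0x85, MO_SB sign-extends it to 0xFFFFFF85; masking with 0xFF00FFFF
 * gives 0xFF00FF85, the copy shifted left by 16 gives 0xFF850000, and the
 * final OR stores 0xFF85FF85, i.e. {{8{sign}}, tmp8, {8{sign}}, tmp8}.
 */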
/*
 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
 */
static void gen_mxu_d16mul(DisasContext *ctx)
    TCGv t0, t1, t2, t3;
    uint32_t XRa, XRb, XRc, XRd, optn2;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    optn2 = extract32(ctx->opcode, 22, 2);

    gen_load_mxu_gpr(t1, XRb);
    tcg_gen_sextract_tl(t0, t1, 0, 16);
    tcg_gen_sextract_tl(t1, t1, 16, 16);
    gen_load_mxu_gpr(t3, XRc);
    tcg_gen_sextract_tl(t2, t3, 0, 16);
    tcg_gen_sextract_tl(t3, t3, 16, 16);

    switch (optn2) {
    case MXU_OPTN2_WW: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t0, t2);
    case MXU_OPTN2_LW: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t0, t2);
    case MXU_OPTN2_HW: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t1, t2);
    case MXU_OPTN2_XW: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t1, t2);
    gen_store_mxu_gpr(t3, XRa);
    gen_store_mxu_gpr(t2, XRd);
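/*
 * In D16MUL above, optn2 selects which 16-bit halves of XRb and XRc feed the
 * two multiplications: WW uses XRb.H*XRc.H and XRb.L*XRc.L, LW uses XRb.L
 * for both products, HW uses XRb.H for both, and XW crosses them
 * (XRb.L*XRc.H and XRb.H*XRc.L). The left product is written to XRa and the
 * right product to XRd.
 */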
/*
 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
 */
static void gen_mxu_d16mac(DisasContext *ctx)
    TCGv t0, t1, t2, t3;
    uint32_t XRa, XRb, XRc, XRd, optn2, aptn2;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    optn2 = extract32(ctx->opcode, 22, 2);
    aptn2 = extract32(ctx->opcode, 24, 2);

    gen_load_mxu_gpr(t1, XRb);
    tcg_gen_sextract_tl(t0, t1, 0, 16);
    tcg_gen_sextract_tl(t1, t1, 16, 16);

    gen_load_mxu_gpr(t3, XRc);
    tcg_gen_sextract_tl(t2, t3, 0, 16);
    tcg_gen_sextract_tl(t3, t3, 16, 16);

    switch (optn2) {
    case MXU_OPTN2_WW: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t0, t2);
    case MXU_OPTN2_LW: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t0, t2);
    case MXU_OPTN2_HW: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t1, t3);
        tcg_gen_mul_tl(t2, t1, t2);
    case MXU_OPTN2_XW: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
        tcg_gen_mul_tl(t3, t0, t3);
        tcg_gen_mul_tl(t2, t1, t2);

    gen_load_mxu_gpr(t0, XRa);
    gen_load_mxu_gpr(t1, XRd);

    switch (aptn2) {
    case MXU_APTN2_AA:
        tcg_gen_add_tl(t3, t0, t3);
        tcg_gen_add_tl(t2, t1, t2);
    case MXU_APTN2_AS:
        tcg_gen_add_tl(t3, t0, t3);
        tcg_gen_sub_tl(t2, t1, t2);
    case MXU_APTN2_SA:
        tcg_gen_sub_tl(t3, t0, t3);
        tcg_gen_add_tl(t2, t1, t2);
    case MXU_APTN2_SS:
        tcg_gen_sub_tl(t3, t0, t3);
        tcg_gen_sub_tl(t2, t1, t2);

    gen_store_mxu_gpr(t3, XRa);
    gen_store_mxu_gpr(t2, XRd);
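/*
 * D16MAC computes the same two products as D16MUL and then combines them
 * with the previous contents of XRa and XRd according to aptn2:
 * MXU_APTN2_AA adds both products, _AS adds the left product into XRa but
 * subtracts the right one from XRd, _SA does the opposite, and _SS
 * subtracts both.
 */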
/*
 * Q8MUL   XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
 */
static void gen_mxu_q8mul_q8mulsu(DisasContext *ctx)
    TCGv t0, t1, t2, t3, t4, t5, t6, t7;
    uint32_t XRa, XRb, XRc, XRd, sel;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    t3 = tcg_temp_new();
    t4 = tcg_temp_new();
    t5 = tcg_temp_new();
    t6 = tcg_temp_new();
    t7 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRc = extract32(ctx->opcode, 14, 4);
    XRd = extract32(ctx->opcode, 18, 4);
    sel = extract32(ctx->opcode, 22, 2);

    gen_load_mxu_gpr(t3, XRb);
    gen_load_mxu_gpr(t7, XRc);

        tcg_gen_ext8s_tl(t0, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t1, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t2, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8s_tl(t3, t3);

        tcg_gen_ext8u_tl(t0, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t1, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t2, t3);
        tcg_gen_shri_tl(t3, t3, 8);
        tcg_gen_ext8u_tl(t3, t3);

    tcg_gen_ext8u_tl(t4, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t5, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t6, t7);
    tcg_gen_shri_tl(t7, t7, 8);
    tcg_gen_ext8u_tl(t7, t7);

    tcg_gen_mul_tl(t0, t0, t4);
    tcg_gen_mul_tl(t1, t1, t5);
    tcg_gen_mul_tl(t2, t2, t6);
    tcg_gen_mul_tl(t3, t3, t7);

    tcg_gen_andi_tl(t0, t0, 0xFFFF);
    tcg_gen_andi_tl(t1, t1, 0xFFFF);
    tcg_gen_andi_tl(t2, t2, 0xFFFF);
    tcg_gen_andi_tl(t3, t3, 0xFFFF);

    tcg_gen_shli_tl(t1, t1, 16);
    tcg_gen_shli_tl(t3, t3, 16);

    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(t1, t2, t3);

    gen_store_mxu_gpr(t0, XRd);
    gen_store_mxu_gpr(t1, XRa);
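/*
 * Q8MUL/Q8MULSU packing: each of the four byte products is truncated to
 * 16 bits; the products of bytes 0 and 1 are packed into XRd and the
 * products of bytes 2 and 3 into XRa (the lower-numbered byte's product
 * occupies the lower half-word of each destination).
 */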
/*
 * S32LDD  XRa, Rb, S12 - Load a word from memory to XRF
 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
 */
static void gen_mxu_s32ldd_s32lddr(DisasContext *ctx)
    uint32_t XRa, Rb, s12, sel;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    XRa = extract32(ctx->opcode, 6, 4);
    s12 = extract32(ctx->opcode, 10, 10);
    sel = extract32(ctx->opcode, 20, 1);
    Rb = extract32(ctx->opcode, 21, 5);

    gen_load_gpr(t0, Rb);

    tcg_gen_movi_tl(t1, s12);
    tcg_gen_shli_tl(t1, t1, 2);
        tcg_gen_ori_tl(t1, t1, 0xFFFFF000);
    tcg_gen_add_tl(t1, t0, t1);
    tcg_gen_qemu_ld_tl(t1, t1, ctx->mem_idx, MO_SL);

        tcg_gen_bswap32_tl(t1, t1);
    gen_store_mxu_gpr(t1, XRa);
/*
 *                 MXU instruction category: logic
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *               S32NOR    S32AND    S32OR    S32XOR
 */

/*
 *  S32NOR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'nor' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32NOR(DisasContext *ctx)
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 1s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0xFFFFFFFF);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the negation of XRc */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the negation of XRb */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to the negation of XRb */
        tcg_gen_not_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        tcg_gen_nor_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
/*
 *  S32AND XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'and' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32AND(DisasContext *ctx)
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* one of operands zero register -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        tcg_gen_and_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
/*
 *  S32OR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'or' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32OR(DisasContext *ctx)
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the content of XRc */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the content of XRb */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
/*
 *  S32XOR XRa, XRb, XRc
 *    Update XRa with the result of logical bitwise 'xor' operation
 *    applied to the content of XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32XOR(DisasContext *ctx)
    uint32_t pad, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just set destination to the content of XRc */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just set destination to the content of XRb */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
        /* the most general case */
        tcg_gen_xor_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], mxu_gpr[XRc - 1]);
/*
 *                 MXU instruction category max/min
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 *                     S32MAX     D16MAX     Q8MAX
 *                     S32MIN     D16MIN     Q8MIN
 */

/*
 *  S32MAX XRa, XRb, XRc
 *    Update XRa with the maximum of signed 32-bit integers contained
 *    in XRb and XRc.
 *
 *  S32MIN XRa, XRb, XRc
 *    Update XRa with the minimum of signed 32-bit integers contained
 *    in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32MAX_S32MIN(DisasContext *ctx)
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - find which one is not...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do max/min operation with one operand 0 */
        if (opc == OPC_MXU_S32MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], mxu_gpr[XRx - 1], 0);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], mxu_gpr[XRx - 1], 0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        if (opc == OPC_MXU_S32MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1],
                             mxu_gpr[XRc - 1]);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1],
                             mxu_gpr[XRc - 1]);
/*
 *    Update XRa with the 16-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *    Update XRa with the 16-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_D16MAX_D16MIN(DisasContext *ctx)
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - find which one is not...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do half-word-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);

        /* the left half-word first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFFFF0000);
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);

        /* the right half-word */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0x0000FFFF);
        /* move half-words to the leftmost position */
        tcg_gen_shli_i32(t0, t0, 16);
        /* t0 will be max/min of t0 and t1 */
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(t0, t0, t1);
            tcg_gen_smin_i32(t0, t0, t1);
        /* return resulting half-words to its original position */
        tcg_gen_shri_i32(t0, t0, 16);
        /* finally update the destination */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();

        /* the left half-word first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFFFF0000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFF0000);
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);

        /* the right half-word */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x0000FFFF);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0x0000FFFF);
        /* move half-words to the leftmost position */
        tcg_gen_shli_i32(t0, t0, 16);
        tcg_gen_shli_i32(t1, t1, 16);
        /* t0 will be max/min of t0 and t1 */
        if (opc == OPC_MXU_D16MAX) {
            tcg_gen_smax_i32(t0, t0, t1);
            tcg_gen_smin_i32(t0, t0, t1);
        /* return resulting half-words to its original position */
        tcg_gen_shri_i32(t0, t0, 16);
        /* finally update the destination */
        tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
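/*
 * The half-word max/min above relies on a shift trick: each 16-bit lane is
 * placed in the most significant half of a 32-bit temporary (the high lane
 * by masking, the low lane by shifting left 16), so a single signed 32-bit
 * smax/smin orders the lanes correctly before the low-lane result is
 * shifted back and OR-ed into the destination.
 */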
/*
 *    Update XRa with the 8-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *    Update XRa with the 8-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_Q8MAX_Q8MIN(DisasContext *ctx)
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - make it be the first...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do byte-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);

        /* the leftmost byte (byte 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract the byte */
            tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF << (8 * i));
            /* move the byte to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
                tcg_gen_smin_i32(t0, t0, t1);
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        /* the most general case */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();

        /* the leftmost bytes (byte 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF000000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract corresponding bytes */
            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF << (8 * i));
            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF << (8 * i));
            /* move the bytes to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            tcg_gen_shli_i32(t1, t1, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
                tcg_gen_smin_i32(t0, t0, t1);
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
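/*
 * Q8MAX/Q8MIN applies the same idea per byte: byte 3 is compared in place
 * (it is already the most significant byte once masked), while bytes 2..0
 * are each shifted up to the most significant position, compared with a
 * signed smax/smin, shifted back, and OR-ed into the destination in turn.
 */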
/*
 *                 MXU instruction category: align
 *                 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

/*
 *  S32ALNI XRc, XRb, XRa, optn3
 *    Arrange bytes from XRb and XRc according to one of five sets of
 *    rules determined by optn3, and place the result in XRa.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |optn3|0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_S32ALNI(DisasContext *ctx)
    uint32_t optn3, pad, XRc, XRb, XRa;

    optn3 = extract32(ctx->opcode, 23, 3);
    pad = extract32(ctx->opcode, 21, 2);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode, 6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to all 0s */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == 0)) {
        /* XRb zero register -> just appropriately shift XRc into XRa */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_shri_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1],
        case MXU_OPTN3_PTN4:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
    } else if (unlikely(XRc == 0)) {
        /* XRc zero register -> just appropriately shift XRb into XRa */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_shri_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], 8 * optn3);
        case MXU_OPTN3_PTN4:
            tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just rotation or moving from any of them */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
        case MXU_OPTN3_PTN4:
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        case MXU_OPTN3_PTN1:
        case MXU_OPTN3_PTN2:
        case MXU_OPTN3_PTN3:
            tcg_gen_rotli_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1], 8 * optn3);
        /* the most general case */
        switch (optn3) {
        case MXU_OPTN3_PTN0:
            /*  +---------------+                  */
            /*  | A   B   C   D |   E   F   G   H  */
            /*  +-------+-------+                  */
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
        case MXU_OPTN3_PTN1:
            /*      +-------------------+          */
            /*   A  |  B   C   D   E    |  F  G  H */
            /*      +---------+---------+          */
            TCGv_i32 t0 = tcg_temp_new();
            TCGv_i32 t1 = tcg_temp_new();

            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x00FFFFFF);
            tcg_gen_shli_i32(t0, t0, 8);

            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
            tcg_gen_shri_i32(t1, t1, 24);

            tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);
        case MXU_OPTN3_PTN2:
            /*          +-------------------+      */
            /*   A   B  |  C   D   E   F    |  G  H */
            /*          +---------+---------+      */
            TCGv_i32 t0 = tcg_temp_new();
            TCGv_i32 t1 = tcg_temp_new();

            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x0000FFFF);
            tcg_gen_shli_i32(t0, t0, 16);

            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFF0000);
            tcg_gen_shri_i32(t1, t1, 16);

            tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);
        case MXU_OPTN3_PTN3:
            /*              +-------------------+  */
            /*   A   B   C  |  D   E   F   G    | H */
            /*              +---------+---------+  */
            TCGv_i32 t0 = tcg_temp_new();
            TCGv_i32 t1 = tcg_temp_new();

            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0x000000FF);
            tcg_gen_shli_i32(t0, t0, 24);

            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFFFFFF00);
            tcg_gen_shri_i32(t1, t1, 8);

            tcg_gen_or_i32(mxu_gpr[XRa - 1], t0, t1);
        case MXU_OPTN3_PTN4:
            /*                  +---------------+  */
            /*   A   B   C   D  | E   F   G   H |  */
            /*                  +-------+-------+  */
            tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRc - 1]);
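/*
 * Worked S32ALNI example for the general case with optn3 = PTN2:
 * if XRb = 0xAABBCCDD and XRc = 0x11223344, the low half of XRb (0xCCDD) is
 * shifted to the top and the high half of XRc (0x1122) to the bottom, so
 * XRa receives 0xCCDD1122 - the middle four bytes of the XRb:XRc byte pair
 * A B C D E F G H.
 */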
/*
 *                   Decoding engine for MXU
 *                   =======================
 */

/*
 * Decode MXU pool00
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool00(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32MAX:
    case OPC_MXU_S32MIN:
        gen_mxu_S32MAX_S32MIN(ctx);
    case OPC_MXU_D16MAX:
    case OPC_MXU_D16MIN:
        gen_mxu_D16MAX_D16MIN(ctx);
    case OPC_MXU_Q8MAX:
    case OPC_MXU_Q8MIN:
        gen_mxu_Q8MAX_Q8MIN(ctx);
    case OPC_MXU_Q8SLT:
        /* TODO: Implement emulation of Q8SLT instruction. */
        MIPS_INVAL("OPC_MXU_Q8SLT");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8SLTU:
        /* TODO: Implement emulation of Q8SLTU instruction. */
        MIPS_INVAL("OPC_MXU_Q8SLTU");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
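/*
 * Each decode_opc_mxu__poolNN() helper below extracts the pool-specific
 * sub-opcode field from the instruction word and dispatches to the matching
 * gen_mxu_*() generator; entries that are not implemented yet raise a
 * Reserved Instruction exception via generate_exception_end(ctx, EXCP_RI).
 */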
/*
 * Decode MXU pool01
 *
 *  S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL01|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+-----+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |en2|0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL01|
 *  +-----------+---+-----+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool01(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32SLT:
        /* TODO: Implement emulation of S32SLT instruction. */
        MIPS_INVAL("OPC_MXU_S32SLT");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16SLT:
        /* TODO: Implement emulation of D16SLT instruction. */
        MIPS_INVAL("OPC_MXU_D16SLT");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16AVG:
        /* TODO: Implement emulation of D16AVG instruction. */
        MIPS_INVAL("OPC_MXU_D16AVG");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16AVGR:
        /* TODO: Implement emulation of D16AVGR instruction. */
        MIPS_INVAL("OPC_MXU_D16AVGR");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8AVG:
        /* TODO: Implement emulation of Q8AVG instruction. */
        MIPS_INVAL("OPC_MXU_Q8AVG");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8AVGR:
        /* TODO: Implement emulation of Q8AVGR instruction. */
        MIPS_INVAL("OPC_MXU_Q8AVGR");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8ADD:
        /* TODO: Implement emulation of Q8ADD instruction. */
        MIPS_INVAL("OPC_MXU_Q8ADD");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool02
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL02|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool02(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32CPS:
        /* TODO: Implement emulation of S32CPS instruction. */
        MIPS_INVAL("OPC_MXU_S32CPS");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16CPS:
        /* TODO: Implement emulation of D16CPS instruction. */
        MIPS_INVAL("OPC_MXU_D16CPS");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8ABD:
        /* TODO: Implement emulation of Q8ABD instruction. */
        MIPS_INVAL("OPC_MXU_Q8ABD");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16SAT:
        /* TODO: Implement emulation of Q16SAT instruction. */
        MIPS_INVAL("OPC_MXU_Q16SAT");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool03
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |x x|on2|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL03|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |x x|on2|   Xd  |  XRc  |  XRb  |  XRa  |MXU__POOL03|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool03(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 24, 2);

    switch (opcode) {
    case OPC_MXU_D16MULF:
        /* TODO: Implement emulation of D16MULF instruction. */
        MIPS_INVAL("OPC_MXU_D16MULF");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16MULE:
        /* TODO: Implement emulation of D16MULE instruction. */
        MIPS_INVAL("OPC_MXU_D16MULE");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool04
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  | SPECIAL2  |    rb   |x|        s12        |  XRa  |MXU__POOL04|
 *  +-----------+---------+-+-------------------+-------+-----------+
 */
static void decode_opc_mxu__pool04(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32LDD:
    case OPC_MXU_S32LDDR:
        gen_mxu_s32ldd_s32lddr(ctx);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool05
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  | SPECIAL2  |    rb   |x|        s12        |  XRa  |MXU__POOL05|
 *  +-----------+---------+-+-------------------+-------+-----------+
 */
static void decode_opc_mxu__pool05(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32STD:
        /* TODO: Implement emulation of S32STD instruction. */
        MIPS_INVAL("OPC_MXU_S32STD");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32STDR:
        /* TODO: Implement emulation of S32STDR instruction. */
        MIPS_INVAL("OPC_MXU_S32STDR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool06
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL06|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 */
static void decode_opc_mxu__pool06(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32LDDV:
        /* TODO: Implement emulation of S32LDDV instruction. */
        MIPS_INVAL("OPC_MXU_S32LDDV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32LDDVR:
        /* TODO: Implement emulation of S32LDDVR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDDVR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool07
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL07|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 */
static void decode_opc_mxu__pool07(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32STDV:
        /* TODO: Implement emulation of S32TDV instruction. */
        MIPS_INVAL("OPC_MXU_S32TDV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32STDVR:
        /* TODO: Implement emulation of S32TDVR instruction. */
        MIPS_INVAL("OPC_MXU_S32TDVR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool08
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  | SPECIAL2  |    rb   |x|        s12        |  XRa  |MXU__POOL08|
 *  +-----------+---------+-+-------------------+-------+-----------+
 */
static void decode_opc_mxu__pool08(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32LDI:
        /* TODO: Implement emulation of S32LDI instruction. */
        MIPS_INVAL("OPC_MXU_S32LDI");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32LDIR:
        /* TODO: Implement emulation of S32LDIR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool09
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-+-------------------+-------+-----------+
 *  | SPECIAL2  |    rb   |x|        s12        |  XRa  |MXU__POOL09|
 *  +-----------+---------+-+-------------------+-------+-----------+
 */
static void decode_opc_mxu__pool09(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 5, 0);

    switch (opcode) {
    case OPC_MXU_S32SDI:
        /* TODO: Implement emulation of S32SDI instruction. */
        MIPS_INVAL("OPC_MXU_S32SDI");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32SDIR:
        /* TODO: Implement emulation of S32SDIR instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool10
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL10|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 */
static void decode_opc_mxu__pool10(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 5, 0);

    switch (opcode) {
    case OPC_MXU_S32LDIV:
        /* TODO: Implement emulation of S32LDIV instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32LDIVR:
        /* TODO: Implement emulation of S32LDIVR instruction. */
        MIPS_INVAL("OPC_MXU_S32LDIVR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool11
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |    rc   |st2|x x x x|  XRa  |MXU__POOL11|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 */
static void decode_opc_mxu__pool11(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 10, 4);

    switch (opcode) {
    case OPC_MXU_S32SDIV:
        /* TODO: Implement emulation of S32SDIV instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32SDIVR:
        /* TODO: Implement emulation of S32SDIVR instruction. */
        MIPS_INVAL("OPC_MXU_S32SDIVR");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool12
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |an2|x x|   Xd  |  XRc  |  XRb  |  XRa  |MXU__POOL12|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool12(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_D32ACC:
        /* TODO: Implement emulation of D32ACC instruction. */
        MIPS_INVAL("OPC_MXU_D32ACC");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D32ACCM:
        /* TODO: Implement emulation of D32ACCM instruction. */
        MIPS_INVAL("OPC_MXU_D32ACCM");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D32ASUM:
        /* TODO: Implement emulation of D32ASUM instruction. */
        MIPS_INVAL("OPC_MXU_D32ASUM");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool13
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |en2|x x|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL13|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool13(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q16ACC:
        /* TODO: Implement emulation of Q16ACC instruction. */
        MIPS_INVAL("OPC_MXU_Q16ACC");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16ACCM:
        /* TODO: Implement emulation of Q16ACCM instruction. */
        MIPS_INVAL("OPC_MXU_Q16ACCM");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16ASUM:
        /* TODO: Implement emulation of Q16ASUM instruction. */
        MIPS_INVAL("OPC_MXU_Q16ASUM");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool14
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0|x x|  XRd  |  XRc  |  XRb  |  XRa  |MXU__POOL14|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |en2|x x|0 0 0 0|  XRc  |  XRb  |  XRa  |MXU__POOL14|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool14(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8ADDE:
        /* TODO: Implement emulation of Q8ADDE instruction. */
        MIPS_INVAL("OPC_MXU_Q8ADDE");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D8SUM:
        /* TODO: Implement emulation of D8SUM instruction. */
        MIPS_INVAL("OPC_MXU_D8SUM");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D8SUMC:
        /* TODO: Implement emulation of D8SUMC instruction. */
        MIPS_INVAL("OPC_MXU_D8SUMC");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool15
 *
 *  S32MUL, S32MULU, S32EXTRV:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rs   |    rt   |x x|  XRd  |  XRa  |MXU__POOL15|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |   sft5  |x x|  XRd  |  XRa  |MXU__POOL15|
 *  +-----------+---------+---------+---+-------+-------+-----------+
 */
static void decode_opc_mxu__pool15(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 14, 2);

    switch (opcode) {
    case OPC_MXU_S32MUL:
        /* TODO: Implement emulation of S32MUL instruction. */
        MIPS_INVAL("OPC_MXU_S32MUL");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32MULU:
        /* TODO: Implement emulation of S32MULU instruction. */
        MIPS_INVAL("OPC_MXU_S32MULU");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32EXTR:
        /* TODO: Implement emulation of S32EXTR instruction. */
        MIPS_INVAL("OPC_MXU_S32EXTR");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32EXTRV:
        /* TODO: Implement emulation of S32EXTRV instruction. */
        MIPS_INVAL("OPC_MXU_S32EXTRV");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool16
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |    rs   |x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  | s3  |0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+-----+---+-----+-------+-------+-------+-----------+
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+-----+---+-----+-------+---------------+-----------+
 *  | SPECIAL2  |optn3|0 0|x x x|  XRc  |       s8      |MXU__POOL16|
 *  +-----------+-----+---+-----+-------+---------------+-----------+
 *
 *  S32NOR, S32AND, S32OR, S32XOR:
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL16|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool16(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_D32SARW:
        /* TODO: Implement emulation of D32SARW instruction. */
        MIPS_INVAL("OPC_MXU_D32SARW");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32ALN:
        /* TODO: Implement emulation of S32ALN instruction. */
        MIPS_INVAL("OPC_MXU_S32ALN");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32ALNI:
        gen_mxu_S32ALNI(ctx);
    case OPC_MXU_S32LUI:
        /* TODO: Implement emulation of S32LUI instruction. */
        MIPS_INVAL("OPC_MXU_S32LUI");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32NOR:
        gen_mxu_S32NOR(ctx);
    case OPC_MXU_S32AND:
        gen_mxu_S32AND(ctx);
    case OPC_MXU_S32OR:
        gen_mxu_S32OR(ctx);
    case OPC_MXU_S32XOR:
        gen_mxu_S32XOR(ctx);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool17
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+---------+---+---------+-----+-----------+
 *  | SPECIAL2  |    rs   |    rt   |0 0|    rd   |x x x|MXU__POOL15|
 *  +-----------+---------+---------+---+---------+-----+-----------+
 */
static void decode_opc_mxu__pool17(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 6, 2);

    switch (opcode) {
        /* TODO: Implement emulation of LXW instruction. */
        MIPS_INVAL("OPC_MXU_LXW");
        generate_exception_end(ctx, EXCP_RI);
        /* TODO: Implement emulation of LXH instruction. */
        MIPS_INVAL("OPC_MXU_LXH");
        generate_exception_end(ctx, EXCP_RI);
        /* TODO: Implement emulation of LXHU instruction. */
        MIPS_INVAL("OPC_MXU_LXHU");
        generate_exception_end(ctx, EXCP_RI);
        /* TODO: Implement emulation of LXB instruction. */
        MIPS_INVAL("OPC_MXU_LXB");
        generate_exception_end(ctx, EXCP_RI);
        /* TODO: Implement emulation of LXBU instruction. */
        MIPS_INVAL("OPC_MXU_LXBU");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool18
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |    rb   |x x x|  XRd  |  XRa  |0 0 0 0|MXU__POOL18|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool18(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_D32SLLV:
        /* TODO: Implement emulation of D32SLLV instruction. */
        MIPS_INVAL("OPC_MXU_D32SLLV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D32SLRV:
        /* TODO: Implement emulation of D32SLRV instruction. */
        MIPS_INVAL("OPC_MXU_D32SLRV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D32SARV:
        /* TODO: Implement emulation of D32SARV instruction. */
        MIPS_INVAL("OPC_MXU_D32SARV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16SLLV:
        /* TODO: Implement emulation of Q16SLLV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SLLV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16SLRV:
        /* TODO: Implement emulation of Q16SLRV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SLRV");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q16SARV:
        /* TODO: Implement emulation of Q16SARV instruction. */
        MIPS_INVAL("OPC_MXU_Q16SARV");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool19
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0|x x|  XRd  |  XRc  |  XRb  |  XRa  |MXU__POOL19|
 *  +-----------+---+---+-------+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool19(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8MUL:
    case OPC_MXU_Q8MULSU:
        gen_mxu_q8mul_q8mulsu(ctx);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
/*
 * Decode MXU pool20
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  | SPECIAL2  |0 0 0 0 0|x x x|  XRc  |  XRb  |  XRa  |MXU__POOL20|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void decode_opc_mxu__pool20(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_Q8MOVZ:
        /* TODO: Implement emulation of Q8MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_Q8MOVZ");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_Q8MOVN:
        /* TODO: Implement emulation of Q8MOVN instruction. */
        MIPS_INVAL("OPC_MXU_Q8MOVN");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16MOVZ:
        /* TODO: Implement emulation of D16MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_D16MOVZ");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_D16MOVN:
        /* TODO: Implement emulation of D16MOVN instruction. */
        MIPS_INVAL("OPC_MXU_D16MOVN");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32MOVZ:
        /* TODO: Implement emulation of S32MOVZ instruction. */
        MIPS_INVAL("OPC_MXU_S32MOVZ");
        generate_exception_end(ctx, EXCP_RI);
    case OPC_MXU_S32MOVN:
        /* TODO: Implement emulation of S32MOVN instruction. */
        MIPS_INVAL("OPC_MXU_S32MOVN");
        generate_exception_end(ctx, EXCP_RI);
    default:
        MIPS_INVAL("decode_opc_mxu");
        generate_exception_end(ctx, EXCP_RI);
26272 * Decode MXU pool21
26274 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26275 * +-----------+---+---+-------+-------+-------+-------+-----------+
26276 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26277 * +-----------+---+---+-------+-------+-------+-------+-----------+
26280 static void decode_opc_mxu__pool21(CPUMIPSState *env, DisasContext *ctx)
26282 uint32_t opcode = extract32(ctx->opcode, 22, 2);
26285 case OPC_MXU_Q8MAC:
26286 /* TODO: Implement emulation of Q8MAC instruction. */
26287 MIPS_INVAL("OPC_MXU_Q8MAC");
26288 generate_exception_end(ctx, EXCP_RI);
26290 case OPC_MXU_Q8MACSU:
26291 /* TODO: Implement emulation of Q8MACSU instruction. */
26292 MIPS_INVAL("OPC_MXU_Q8MACSU");
26293 generate_exception_end(ctx, EXCP_RI);
26296 MIPS_INVAL("decode_opc_mxu");
26297 generate_exception_end(ctx, EXCP_RI);
26304 * Main MXU decoding function
26306 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26307 * +-----------+---------------------------------------+-----------+
26308 * | SPECIAL2 | |x x x x x x|
26309 * +-----------+---------------------------------------+-----------+
26312 static void decode_opc_mxu(CPUMIPSState *env, DisasContext *ctx)
26315 * TODO: Investigate necessity of including handling of
26316 * CLZ, CLO, SDBBP in this function, as they belong to
26317 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26319 uint32_t opcode = extract32(ctx->opcode, 0, 6);
26321 if (opcode == OPC__MXU_MUL) {
26322 uint32_t rs, rt, rd, op1;
26324 rs = extract32(ctx->opcode, 21, 5);
26325 rt = extract32(ctx->opcode, 16, 5);
26326 rd = extract32(ctx->opcode, 11, 5);
26327 op1 = MASK_SPECIAL2(ctx->opcode);
26329 gen_arith(ctx, op1, rd, rs, rt);
26334 if (opcode == OPC_MXU_S32M2I) {
26335 gen_mxu_s32m2i(ctx);
26339 if (opcode == OPC_MXU_S32I2M) {
26340 gen_mxu_s32i2m(ctx);
26345 TCGv t_mxu_cr = tcg_temp_new();
26346 TCGLabel *l_exit = gen_new_label();
26348 gen_load_mxu_cr(t_mxu_cr);
26349 tcg_gen_andi_tl(t_mxu_cr, t_mxu_cr, MXU_CR_MXU_EN);
26350 tcg_gen_brcondi_tl(TCG_COND_NE, t_mxu_cr, MXU_CR_MXU_EN, l_exit);
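/*
 * Note, inferred from the generated code above rather than from any MXU
 * documentation: apart from the MUL alias and the S32M2I/S32I2M moves, MXU
 * instructions are only decoded while the MXU_EN bit of the MXU control
 * register is set; otherwise the branch to l_exit skips the whole switch
 * below and the instruction effectively behaves as a NOP.
 */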
26353 case OPC_MXU_S32MADD
:
26354 /* TODO: Implement emulation of S32MADD instruction. */
26355 MIPS_INVAL("OPC_MXU_S32MADD");
26356 generate_exception_end(ctx
, EXCP_RI
);
26358 case OPC_MXU_S32MADDU
:
26359 /* TODO: Implement emulation of S32MADDU instruction. */
26360 MIPS_INVAL("OPC_MXU_S32MADDU");
26361 generate_exception_end(ctx
, EXCP_RI
);
26363 case OPC_MXU__POOL00
:
26364 decode_opc_mxu__pool00(env
, ctx
);
26366 case OPC_MXU_S32MSUB
:
26367 /* TODO: Implement emulation of S32MSUB instruction. */
26368 MIPS_INVAL("OPC_MXU_S32MSUB");
26369 generate_exception_end(ctx
, EXCP_RI
);
26371 case OPC_MXU_S32MSUBU
:
26372 /* TODO: Implement emulation of S32MSUBU instruction. */
26373 MIPS_INVAL("OPC_MXU_S32MSUBU");
26374 generate_exception_end(ctx
, EXCP_RI
);
26376 case OPC_MXU__POOL01
:
26377 decode_opc_mxu__pool01(env
, ctx
);
26379 case OPC_MXU__POOL02
:
26380 decode_opc_mxu__pool02(env
, ctx
);
26382 case OPC_MXU_D16MUL
:
26383 gen_mxu_d16mul(ctx
);
26385 case OPC_MXU__POOL03
:
26386 decode_opc_mxu__pool03(env
, ctx
);
26388 case OPC_MXU_D16MAC
:
26389 gen_mxu_d16mac(ctx
);
26391 case OPC_MXU_D16MACF
:
26392 /* TODO: Implement emulation of D16MACF instruction. */
26393 MIPS_INVAL("OPC_MXU_D16MACF");
26394 generate_exception_end(ctx
, EXCP_RI
);
26396 case OPC_MXU_D16MADL
:
26397 /* TODO: Implement emulation of D16MADL instruction. */
26398 MIPS_INVAL("OPC_MXU_D16MADL");
26399 generate_exception_end(ctx
, EXCP_RI
);
26401 case OPC_MXU_S16MAD
:
26402 /* TODO: Implement emulation of S16MAD instruction. */
26403 MIPS_INVAL("OPC_MXU_S16MAD");
26404 generate_exception_end(ctx
, EXCP_RI
);
26406 case OPC_MXU_Q16ADD
:
26407 /* TODO: Implement emulation of Q16ADD instruction. */
26408 MIPS_INVAL("OPC_MXU_Q16ADD");
26409 generate_exception_end(ctx
, EXCP_RI
);
26411 case OPC_MXU_D16MACE
:
26412 /* TODO: Implement emulation of D16MACE instruction. */
26413 MIPS_INVAL("OPC_MXU_D16MACE");
26414 generate_exception_end(ctx
, EXCP_RI
);
26416 case OPC_MXU__POOL04
:
26417 decode_opc_mxu__pool04(env
, ctx
);
26419 case OPC_MXU__POOL05
:
26420 decode_opc_mxu__pool05(env
, ctx
);
26422 case OPC_MXU__POOL06
:
26423 decode_opc_mxu__pool06(env
, ctx
);
26425 case OPC_MXU__POOL07
:
26426 decode_opc_mxu__pool07(env
, ctx
);
26428 case OPC_MXU__POOL08
:
26429 decode_opc_mxu__pool08(env
, ctx
);
26431 case OPC_MXU__POOL09
:
26432 decode_opc_mxu__pool09(env
, ctx
);
26434 case OPC_MXU__POOL10
:
26435 decode_opc_mxu__pool10(env
, ctx
);
26437 case OPC_MXU__POOL11
:
26438 decode_opc_mxu__pool11(env
, ctx
);
26440 case OPC_MXU_D32ADD
:
26441 /* TODO: Implement emulation of D32ADD instruction. */
26442 MIPS_INVAL("OPC_MXU_D32ADD");
26443 generate_exception_end(ctx
, EXCP_RI
);
26445 case OPC_MXU__POOL12
:
26446 decode_opc_mxu__pool12(env
, ctx
);
26448 case OPC_MXU__POOL13
:
26449 decode_opc_mxu__pool13(env
, ctx
);
26451 case OPC_MXU__POOL14
:
26452 decode_opc_mxu__pool14(env
, ctx
);
26454 case OPC_MXU_Q8ACCE
:
26455 /* TODO: Implement emulation of Q8ACCE instruction. */
26456 MIPS_INVAL("OPC_MXU_Q8ACCE");
26457 generate_exception_end(ctx
, EXCP_RI
);
26459 case OPC_MXU_S8LDD
:
26460 gen_mxu_s8ldd(ctx
);
26462 case OPC_MXU_S8STD
:
26463 /* TODO: Implement emulation of S8STD instruction. */
26464 MIPS_INVAL("OPC_MXU_S8STD");
26465 generate_exception_end(ctx
, EXCP_RI
);
26467 case OPC_MXU_S8LDI
:
26468 /* TODO: Implement emulation of S8LDI instruction. */
26469 MIPS_INVAL("OPC_MXU_S8LDI");
26470 generate_exception_end(ctx
, EXCP_RI
);
26472 case OPC_MXU_S8SDI
:
26473 /* TODO: Implement emulation of S8SDI instruction. */
26474 MIPS_INVAL("OPC_MXU_S8SDI");
26475 generate_exception_end(ctx
, EXCP_RI
);
26477 case OPC_MXU__POOL15
:
26478 decode_opc_mxu__pool15(env
, ctx
);
26480 case OPC_MXU__POOL16
:
26481 decode_opc_mxu__pool16(env
, ctx
);
26483 case OPC_MXU__POOL17
:
26484 decode_opc_mxu__pool17(env
, ctx
);
26486 case OPC_MXU_S16LDD
:
26487 /* TODO: Implement emulation of S16LDD instruction. */
26488 MIPS_INVAL("OPC_MXU_S16LDD");
26489 generate_exception_end(ctx
, EXCP_RI
);
26491 case OPC_MXU_S16STD
:
26492 /* TODO: Implement emulation of S16STD instruction. */
26493 MIPS_INVAL("OPC_MXU_S16STD");
26494 generate_exception_end(ctx
, EXCP_RI
);
26496 case OPC_MXU_S16LDI
:
26497 /* TODO: Implement emulation of S16LDI instruction. */
26498 MIPS_INVAL("OPC_MXU_S16LDI");
26499 generate_exception_end(ctx
, EXCP_RI
);
26501 case OPC_MXU_S16SDI
:
26502 /* TODO: Implement emulation of S16SDI instruction. */
26503 MIPS_INVAL("OPC_MXU_S16SDI");
26504 generate_exception_end(ctx
, EXCP_RI
);
26506 case OPC_MXU_D32SLL
:
26507 /* TODO: Implement emulation of D32SLL instruction. */
26508 MIPS_INVAL("OPC_MXU_D32SLL");
26509 generate_exception_end(ctx
, EXCP_RI
);
26511 case OPC_MXU_D32SLR
:
26512 /* TODO: Implement emulation of D32SLR instruction. */
26513 MIPS_INVAL("OPC_MXU_D32SLR");
26514 generate_exception_end(ctx
, EXCP_RI
);
26516 case OPC_MXU_D32SARL
:
26517 /* TODO: Implement emulation of D32SARL instruction. */
26518 MIPS_INVAL("OPC_MXU_D32SARL");
26519 generate_exception_end(ctx
, EXCP_RI
);
26521 case OPC_MXU_D32SAR
:
26522 /* TODO: Implement emulation of D32SAR instruction. */
26523 MIPS_INVAL("OPC_MXU_D32SAR");
26524 generate_exception_end(ctx
, EXCP_RI
);
26526 case OPC_MXU_Q16SLL
:
26527 /* TODO: Implement emulation of Q16SLL instruction. */
26528 MIPS_INVAL("OPC_MXU_Q16SLL");
26529 generate_exception_end(ctx
, EXCP_RI
);
26531 case OPC_MXU_Q16SLR
:
26532 /* TODO: Implement emulation of Q16SLR instruction. */
26533 MIPS_INVAL("OPC_MXU_Q16SLR");
26534 generate_exception_end(ctx
, EXCP_RI
);
26536 case OPC_MXU__POOL18
:
26537 decode_opc_mxu__pool18(env
, ctx
);
26539 case OPC_MXU_Q16SAR
:
26540 /* TODO: Implement emulation of Q16SAR instruction. */
26541 MIPS_INVAL("OPC_MXU_Q16SAR");
26542 generate_exception_end(ctx
, EXCP_RI
);
26544 case OPC_MXU__POOL19
:
26545 decode_opc_mxu__pool19(env
, ctx
);
26547 case OPC_MXU__POOL20
:
26548 decode_opc_mxu__pool20(env
, ctx
);
26550 case OPC_MXU__POOL21
:
26551 decode_opc_mxu__pool21(env
, ctx
);
26553 case OPC_MXU_Q16SCOP
:
26554 /* TODO: Implement emulation of Q16SCOP instruction. */
26555 MIPS_INVAL("OPC_MXU_Q16SCOP");
26556 generate_exception_end(ctx
, EXCP_RI
);
26558 case OPC_MXU_Q8MADL
:
26559 /* TODO: Implement emulation of Q8MADL instruction. */
26560 MIPS_INVAL("OPC_MXU_Q8MADL");
26561 generate_exception_end(ctx
, EXCP_RI
);
26563 case OPC_MXU_S32SFL
:
26564 /* TODO: Implement emulation of S32SFL instruction. */
26565 MIPS_INVAL("OPC_MXU_S32SFL");
26566 generate_exception_end(ctx
, EXCP_RI
);
26568 case OPC_MXU_Q8SAD
:
26569 /* TODO: Implement emulation of Q8SAD instruction. */
26570 MIPS_INVAL("OPC_MXU_Q8SAD");
26571 generate_exception_end(ctx
, EXCP_RI
);
26574 MIPS_INVAL("decode_opc_mxu");
26575 generate_exception_end(ctx, EXCP_RI);
26578 gen_set_label(l_exit);
26579 tcg_temp_free(t_mxu_cr);
26583 #endif /* !defined(TARGET_MIPS64) */
26586 static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
26591 check_insn_opc_removed(ctx, ISA_MIPS32R6);
26593 rs = (ctx->opcode >> 21) & 0x1f;
26594 rt = (ctx->opcode >> 16) & 0x1f;
26595 rd = (ctx->opcode >> 11) & 0x1f;
26597 op1 = MASK_SPECIAL2(ctx->opcode);
26599 case OPC_MADD: /* Multiply and add/sub */
26603 check_insn(ctx, ISA_MIPS32);
26604 gen_muldiv(ctx, op1, rd & 3, rs, rt);
26607 gen_arith(ctx, op1, rd, rs, rt);
26610 case OPC_DIVU_G_2F:
26611 case OPC_MULT_G_2F:
26612 case OPC_MULTU_G_2F:
26614 case OPC_MODU_G_2F:
26615 check_insn(ctx, INSN_LOONGSON2F);
26616 gen_loongson_integer(ctx, op1, rd, rs, rt);
26620 check_insn(ctx, ISA_MIPS32);
26621 gen_cl(ctx, op1, rd, rs);
26624 if (is_uhi(extract32(ctx->opcode, 6, 20))) {
26625 gen_helper_do_semihosting(cpu_env);
26627 /* XXX: not clear which exception should be raised
26628 * when in debug mode...
26630 check_insn(ctx, ISA_MIPS32);
26631 generate_exception_end(ctx, EXCP_DBp);
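/*
 * Sketch of the SDBBP handling above, assuming the UHI semihosting
 * convention used elsewhere in QEMU: when is_uhi() accepts the 20-bit code
 * field (semihosting enabled and the code matching the UHI value), the
 * semihosting helper services the call; any other SDBBP raises the debug
 * breakpoint exception EXCP_DBp.
 */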
26634 #if defined(TARGET_MIPS64)
26637 check_insn(ctx, ISA_MIPS64);
26638 check_mips_64(ctx);
26639 gen_cl(ctx, op1, rd, rs);
26641 case OPC_DMULT_G_2F:
26642 case OPC_DMULTU_G_2F:
26643 case OPC_DDIV_G_2F:
26644 case OPC_DDIVU_G_2F:
26645 case OPC_DMOD_G_2F:
26646 case OPC_DMODU_G_2F:
26647 check_insn(ctx, INSN_LOONGSON2F);
26648 gen_loongson_integer(ctx, op1, rd, rs, rt);
26651 default: /* Invalid */
26652 MIPS_INVAL("special2_legacy");
26653 generate_exception_end(ctx, EXCP_RI);
26658 static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
26660 int rs, rt, rd, sa;
26664 rs = (ctx->opcode >> 21) & 0x1f;
26665 rt = (ctx->opcode >> 16) & 0x1f;
26666 rd = (ctx->opcode >> 11) & 0x1f;
26667 sa = (ctx->opcode >> 6) & 0x1f;
26668 imm = (int16_t)ctx->opcode >> 7;
26670 op1 = MASK_SPECIAL3(ctx->opcode);
26674 /* hint codes 24-31 are reserved and signal RI */
26675 generate_exception_end(ctx, EXCP_RI);
26677 /* Treat as NOP. */
26680 check_cp0_enabled(ctx);
26681 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
26682 gen_cache_operation(ctx, rt, rs, imm);
26686 gen_st_cond(ctx, rt, rs, imm, MO_TESL, false);
26689 gen_ld(ctx, op1, rt, rs, imm);
26694 /* Treat as NOP. */
26697 op2 = MASK_BSHFL(ctx->opcode);
26703 gen_align(ctx, 32, rd, rs, rt, sa & 3);
26706 gen_bitswap(ctx, op2, rd, rt);
26711 #if defined(TARGET_MIPS64)
26713 gen_st_cond(ctx, rt, rs, imm, MO_TEQ, false);
26716 gen_ld(ctx, op1, rt, rs, imm);
26719 check_mips_64(ctx);
26722 /* Treat as NOP. */
26725 op2 = MASK_DBSHFL(ctx->opcode);
26735 gen_align(ctx, 64, rd, rs, rt, sa & 7);
26738 gen_bitswap(ctx, op2, rd, rt);
26745 default: /* Invalid */
26746 MIPS_INVAL("special3_r6");
26747 generate_exception_end(ctx, EXCP_RI);
26752 static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
26757 rs = (ctx->opcode >> 21) & 0x1f;
26758 rt = (ctx->opcode >> 16) & 0x1f;
26759 rd = (ctx->opcode >> 11) & 0x1f;
26761 op1 = MASK_SPECIAL3(ctx->opcode);
26764 case OPC_DIVU_G_2E
:
26766 case OPC_MODU_G_2E
:
26767 case OPC_MULT_G_2E
:
26768 case OPC_MULTU_G_2E
:
26769 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26770 * the same mask and op1. */
26771 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26772 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26775 case OPC_ADDUH_R_QB
:
26777 case OPC_ADDQH_R_PH
:
26779 case OPC_ADDQH_R_W
:
26781 case OPC_SUBUH_R_QB
:
26783 case OPC_SUBQH_R_PH
:
26785 case OPC_SUBQH_R_W
:
26786 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26791 case OPC_MULQ_RS_W
:
26792 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26795 MIPS_INVAL("MASK ADDUH.QB");
26796 generate_exception_end(ctx
, EXCP_RI
);
26799 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26800 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26802 generate_exception_end(ctx
, EXCP_RI
);
26806 op2
= MASK_LX(ctx
->opcode
);
26808 #if defined(TARGET_MIPS64)
26814 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26816 default: /* Invalid */
26817 MIPS_INVAL("MASK LX");
26818 generate_exception_end(ctx
, EXCP_RI
);
26822 case OPC_ABSQ_S_PH_DSP
:
26823 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26825 case OPC_ABSQ_S_QB
:
26826 case OPC_ABSQ_S_PH
:
26828 case OPC_PRECEQ_W_PHL
:
26829 case OPC_PRECEQ_W_PHR
:
26830 case OPC_PRECEQU_PH_QBL
:
26831 case OPC_PRECEQU_PH_QBR
:
26832 case OPC_PRECEQU_PH_QBLA
:
26833 case OPC_PRECEQU_PH_QBRA
:
26834 case OPC_PRECEU_PH_QBL
:
26835 case OPC_PRECEU_PH_QBR
:
26836 case OPC_PRECEU_PH_QBLA
:
26837 case OPC_PRECEU_PH_QBRA
:
26838 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26845 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26848 MIPS_INVAL("MASK ABSQ_S.PH");
26849 generate_exception_end(ctx
, EXCP_RI
);
26853 case OPC_ADDU_QB_DSP
:
26854 op2
= MASK_ADDU_QB(ctx
->opcode
);
26857 case OPC_ADDQ_S_PH
:
26860 case OPC_ADDU_S_QB
:
26862 case OPC_ADDU_S_PH
:
26864 case OPC_SUBQ_S_PH
:
26867 case OPC_SUBU_S_QB
:
26869 case OPC_SUBU_S_PH
:
26873 case OPC_RADDU_W_QB
:
26874 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26876 case OPC_MULEU_S_PH_QBL
:
26877 case OPC_MULEU_S_PH_QBR
:
26878 case OPC_MULQ_RS_PH
:
26879 case OPC_MULEQ_S_W_PHL
:
26880 case OPC_MULEQ_S_W_PHR
:
26881 case OPC_MULQ_S_PH
:
26882 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26884 default: /* Invalid */
26885 MIPS_INVAL("MASK ADDU.QB");
26886 generate_exception_end(ctx
, EXCP_RI
);
26891 case OPC_CMPU_EQ_QB_DSP
:
26892 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26894 case OPC_PRECR_SRA_PH_W
:
26895 case OPC_PRECR_SRA_R_PH_W
:
26896 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26898 case OPC_PRECR_QB_PH
:
26899 case OPC_PRECRQ_QB_PH
:
26900 case OPC_PRECRQ_PH_W
:
26901 case OPC_PRECRQ_RS_PH_W
:
26902 case OPC_PRECRQU_S_QB_PH
:
26903 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26905 case OPC_CMPU_EQ_QB
:
26906 case OPC_CMPU_LT_QB
:
26907 case OPC_CMPU_LE_QB
:
26908 case OPC_CMP_EQ_PH
:
26909 case OPC_CMP_LT_PH
:
26910 case OPC_CMP_LE_PH
:
26911 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26913 case OPC_CMPGU_EQ_QB
:
26914 case OPC_CMPGU_LT_QB
:
26915 case OPC_CMPGU_LE_QB
:
26916 case OPC_CMPGDU_EQ_QB
:
26917 case OPC_CMPGDU_LT_QB
:
26918 case OPC_CMPGDU_LE_QB
:
26921 case OPC_PACKRL_PH
:
26922 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26924 default: /* Invalid */
26925 MIPS_INVAL("MASK CMPU.EQ.QB");
26926 generate_exception_end(ctx
, EXCP_RI
);
26930 case OPC_SHLL_QB_DSP
:
26931 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26933 case OPC_DPA_W_PH_DSP
:
26934 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26936 case OPC_DPAU_H_QBL
:
26937 case OPC_DPAU_H_QBR
:
26938 case OPC_DPSU_H_QBL
:
26939 case OPC_DPSU_H_QBR
:
26941 case OPC_DPAX_W_PH
:
26942 case OPC_DPAQ_S_W_PH
:
26943 case OPC_DPAQX_S_W_PH
:
26944 case OPC_DPAQX_SA_W_PH
:
26946 case OPC_DPSX_W_PH
:
26947 case OPC_DPSQ_S_W_PH
:
26948 case OPC_DPSQX_S_W_PH
:
26949 case OPC_DPSQX_SA_W_PH
:
26950 case OPC_MULSAQ_S_W_PH
:
26951 case OPC_DPAQ_SA_L_W
:
26952 case OPC_DPSQ_SA_L_W
:
26953 case OPC_MAQ_S_W_PHL
:
26954 case OPC_MAQ_S_W_PHR
:
26955 case OPC_MAQ_SA_W_PHL
:
26956 case OPC_MAQ_SA_W_PHR
:
26957 case OPC_MULSA_W_PH
:
26958 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26960 default: /* Invalid */
26961 MIPS_INVAL("MASK DPAW.PH");
26962 generate_exception_end(ctx
, EXCP_RI
);
26967 op2
= MASK_INSV(ctx
->opcode
);
26978 t0
= tcg_temp_new();
26979 t1
= tcg_temp_new();
26981 gen_load_gpr(t0
, rt
);
26982 gen_load_gpr(t1
, rs
);
26984 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26990 default: /* Invalid */
26991 MIPS_INVAL("MASK INSV");
26992 generate_exception_end(ctx
, EXCP_RI
);
26996 case OPC_APPEND_DSP
:
26997 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26999 case OPC_EXTR_W_DSP
:
27000 op2
= MASK_EXTR_W(ctx
->opcode
);
27004 case OPC_EXTR_RS_W
:
27006 case OPC_EXTRV_S_H
:
27008 case OPC_EXTRV_R_W
:
27009 case OPC_EXTRV_RS_W
:
27014 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27017 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27023 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27025 default: /* Invalid */
27026 MIPS_INVAL("MASK EXTR.W");
27027 generate_exception_end(ctx
, EXCP_RI
);
27031 #if defined(TARGET_MIPS64)
27032 case OPC_DDIV_G_2E
:
27033 case OPC_DDIVU_G_2E
:
27034 case OPC_DMULT_G_2E
:
27035 case OPC_DMULTU_G_2E
:
27036 case OPC_DMOD_G_2E
:
27037 case OPC_DMODU_G_2E
:
27038 check_insn(ctx
, INSN_LOONGSON2E
);
27039 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27041 case OPC_ABSQ_S_QH_DSP
:
27042 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27044 case OPC_PRECEQ_L_PWL
:
27045 case OPC_PRECEQ_L_PWR
:
27046 case OPC_PRECEQ_PW_QHL
:
27047 case OPC_PRECEQ_PW_QHR
:
27048 case OPC_PRECEQ_PW_QHLA
:
27049 case OPC_PRECEQ_PW_QHRA
:
27050 case OPC_PRECEQU_QH_OBL
:
27051 case OPC_PRECEQU_QH_OBR
:
27052 case OPC_PRECEQU_QH_OBLA
:
27053 case OPC_PRECEQU_QH_OBRA
:
27054 case OPC_PRECEU_QH_OBL
:
27055 case OPC_PRECEU_QH_OBR
:
27056 case OPC_PRECEU_QH_OBLA
:
27057 case OPC_PRECEU_QH_OBRA
:
27058 case OPC_ABSQ_S_OB
:
27059 case OPC_ABSQ_S_PW
:
27060 case OPC_ABSQ_S_QH
:
27061 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27069 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27071 default: /* Invalid */
27072 MIPS_INVAL("MASK ABSQ_S.QH");
27073 generate_exception_end(ctx
, EXCP_RI
);
27077 case OPC_ADDU_OB_DSP
:
27078 op2
= MASK_ADDU_OB(ctx
->opcode
);
27080 case OPC_RADDU_L_OB
:
27082 case OPC_SUBQ_S_PW
:
27084 case OPC_SUBQ_S_QH
:
27086 case OPC_SUBU_S_OB
:
27088 case OPC_SUBU_S_QH
:
27090 case OPC_SUBUH_R_OB
:
27092 case OPC_ADDQ_S_PW
:
27094 case OPC_ADDQ_S_QH
:
27096 case OPC_ADDU_S_OB
:
27098 case OPC_ADDU_S_QH
:
27100 case OPC_ADDUH_R_OB
:
27101 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27103 case OPC_MULEQ_S_PW_QHL
:
27104 case OPC_MULEQ_S_PW_QHR
:
27105 case OPC_MULEU_S_QH_OBL
:
27106 case OPC_MULEU_S_QH_OBR
:
27107 case OPC_MULQ_RS_QH
:
27108 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27110 default: /* Invalid */
27111 MIPS_INVAL("MASK ADDU.OB");
27112 generate_exception_end(ctx
, EXCP_RI
);
27116 case OPC_CMPU_EQ_OB_DSP
:
27117 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27119 case OPC_PRECR_SRA_QH_PW
:
27120 case OPC_PRECR_SRA_R_QH_PW
:
27121 /* Return value is rt. */
27122 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27124 case OPC_PRECR_OB_QH
:
27125 case OPC_PRECRQ_OB_QH
:
27126 case OPC_PRECRQ_PW_L
:
27127 case OPC_PRECRQ_QH_PW
:
27128 case OPC_PRECRQ_RS_QH_PW
:
27129 case OPC_PRECRQU_S_OB_QH
:
27130 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27132 case OPC_CMPU_EQ_OB
:
27133 case OPC_CMPU_LT_OB
:
27134 case OPC_CMPU_LE_OB
:
27135 case OPC_CMP_EQ_QH
:
27136 case OPC_CMP_LT_QH
:
27137 case OPC_CMP_LE_QH
:
27138 case OPC_CMP_EQ_PW
:
27139 case OPC_CMP_LT_PW
:
27140 case OPC_CMP_LE_PW
:
27141 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27143 case OPC_CMPGDU_EQ_OB
:
27144 case OPC_CMPGDU_LT_OB
:
27145 case OPC_CMPGDU_LE_OB
:
27146 case OPC_CMPGU_EQ_OB
:
27147 case OPC_CMPGU_LT_OB
:
27148 case OPC_CMPGU_LE_OB
:
27149 case OPC_PACKRL_PW
:
27153 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27155 default: /* Invalid */
27156 MIPS_INVAL("MASK CMPU_EQ.OB");
27157 generate_exception_end(ctx
, EXCP_RI
);
27161 case OPC_DAPPEND_DSP
:
27162 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27164 case OPC_DEXTR_W_DSP
:
27165 op2
= MASK_DEXTR_W(ctx
->opcode
);
27172 case OPC_DEXTR_R_L
:
27173 case OPC_DEXTR_RS_L
:
27175 case OPC_DEXTR_R_W
:
27176 case OPC_DEXTR_RS_W
:
27177 case OPC_DEXTR_S_H
:
27179 case OPC_DEXTRV_R_L
:
27180 case OPC_DEXTRV_RS_L
:
27181 case OPC_DEXTRV_S_H
:
27183 case OPC_DEXTRV_R_W
:
27184 case OPC_DEXTRV_RS_W
:
27185 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27190 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27192 default: /* Invalid */
27193 MIPS_INVAL("MASK EXTR.W");
27194 generate_exception_end(ctx
, EXCP_RI
);
27198 case OPC_DPAQ_W_QH_DSP
:
27199 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27201 case OPC_DPAU_H_OBL
:
27202 case OPC_DPAU_H_OBR
:
27203 case OPC_DPSU_H_OBL
:
27204 case OPC_DPSU_H_OBR
:
27206 case OPC_DPAQ_S_W_QH
:
27208 case OPC_DPSQ_S_W_QH
:
27209 case OPC_MULSAQ_S_W_QH
:
27210 case OPC_DPAQ_SA_L_PW
:
27211 case OPC_DPSQ_SA_L_PW
:
27212 case OPC_MULSAQ_S_L_PW
:
27213 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27215 case OPC_MAQ_S_W_QHLL
:
27216 case OPC_MAQ_S_W_QHLR
:
27217 case OPC_MAQ_S_W_QHRL
:
27218 case OPC_MAQ_S_W_QHRR
:
27219 case OPC_MAQ_SA_W_QHLL
:
27220 case OPC_MAQ_SA_W_QHLR
:
27221 case OPC_MAQ_SA_W_QHRL
:
27222 case OPC_MAQ_SA_W_QHRR
:
27223 case OPC_MAQ_S_L_PWL
:
27224 case OPC_MAQ_S_L_PWR
:
27229 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27231 default: /* Invalid */
27232 MIPS_INVAL("MASK DPAQ.W.QH");
27233 generate_exception_end(ctx
, EXCP_RI
);
27237 case OPC_DINSV_DSP
:
27238 op2
= MASK_INSV(ctx
->opcode
);
27249 t0
= tcg_temp_new();
27250 t1
= tcg_temp_new();
27252 gen_load_gpr(t0
, rt
);
27253 gen_load_gpr(t1
, rs
);
27255 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27261 default: /* Invalid */
27262 MIPS_INVAL("MASK DINSV");
27263 generate_exception_end(ctx
, EXCP_RI
);
27267 case OPC_SHLL_OB_DSP
:
27268 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27271 default: /* Invalid */
27272 MIPS_INVAL("special3_legacy");
27273 generate_exception_end(ctx
, EXCP_RI
);
27279 #if defined(TARGET_MIPS64)
27281 static void decode_mmi0(CPUMIPSState *env, DisasContext *ctx)
27283 uint32_t opc = MASK_MMI0(ctx->opcode);
27286 case MMI_OPC_0_PADDW: /* TODO: MMI_OPC_0_PADDW */
27287 case MMI_OPC_0_PSUBW: /* TODO: MMI_OPC_0_PSUBW */
27288 case MMI_OPC_0_PCGTW: /* TODO: MMI_OPC_0_PCGTW */
27289 case MMI_OPC_0_PMAXW: /* TODO: MMI_OPC_0_PMAXW */
27290 case MMI_OPC_0_PADDH: /* TODO: MMI_OPC_0_PADDH */
27291 case MMI_OPC_0_PSUBH: /* TODO: MMI_OPC_0_PSUBH */
27292 case MMI_OPC_0_PCGTH: /* TODO: MMI_OPC_0_PCGTH */
27293 case MMI_OPC_0_PMAXH: /* TODO: MMI_OPC_0_PMAXH */
27294 case MMI_OPC_0_PADDB: /* TODO: MMI_OPC_0_PADDB */
27295 case MMI_OPC_0_PSUBB: /* TODO: MMI_OPC_0_PSUBB */
27296 case MMI_OPC_0_PCGTB: /* TODO: MMI_OPC_0_PCGTB */
27297 case MMI_OPC_0_PADDSW: /* TODO: MMI_OPC_0_PADDSW */
27298 case MMI_OPC_0_PSUBSW: /* TODO: MMI_OPC_0_PSUBSW */
27299 case MMI_OPC_0_PEXTLW: /* TODO: MMI_OPC_0_PEXTLW */
27300 case MMI_OPC_0_PPACW: /* TODO: MMI_OPC_0_PPACW */
27301 case MMI_OPC_0_PADDSH: /* TODO: MMI_OPC_0_PADDSH */
27302 case MMI_OPC_0_PSUBSH: /* TODO: MMI_OPC_0_PSUBSH */
27303 case MMI_OPC_0_PEXTLH: /* TODO: MMI_OPC_0_PEXTLH */
27304 case MMI_OPC_0_PPACH: /* TODO: MMI_OPC_0_PPACH */
27305 case MMI_OPC_0_PADDSB: /* TODO: MMI_OPC_0_PADDSB */
27306 case MMI_OPC_0_PSUBSB: /* TODO: MMI_OPC_0_PSUBSB */
27307 case MMI_OPC_0_PEXTLB: /* TODO: MMI_OPC_0_PEXTLB */
27308 case MMI_OPC_0_PPACB: /* TODO: MMI_OPC_0_PPACB */
27309 case MMI_OPC_0_PEXT5: /* TODO: MMI_OPC_0_PEXT5 */
27310 case MMI_OPC_0_PPAC5: /* TODO: MMI_OPC_0_PPAC5 */
27311 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_CLASS_MMI0 */
27314 MIPS_INVAL("TX79 MMI class MMI0");
27315 generate_exception_end(ctx, EXCP_RI);
27320 static void decode_mmi1(CPUMIPSState *env, DisasContext *ctx)
27322 uint32_t opc = MASK_MMI1(ctx->opcode);
27325 case MMI_OPC_1_PABSW: /* TODO: MMI_OPC_1_PABSW */
27326 case MMI_OPC_1_PCEQW: /* TODO: MMI_OPC_1_PCEQW */
27327 case MMI_OPC_1_PMINW: /* TODO: MMI_OPC_1_PMINW */
27328 case MMI_OPC_1_PADSBH: /* TODO: MMI_OPC_1_PADSBH */
27329 case MMI_OPC_1_PABSH: /* TODO: MMI_OPC_1_PABSH */
27330 case MMI_OPC_1_PCEQH: /* TODO: MMI_OPC_1_PCEQH */
27331 case MMI_OPC_1_PMINH: /* TODO: MMI_OPC_1_PMINH */
27332 case MMI_OPC_1_PCEQB: /* TODO: MMI_OPC_1_PCEQB */
27333 case MMI_OPC_1_PADDUW: /* TODO: MMI_OPC_1_PADDUW */
27334 case MMI_OPC_1_PSUBUW: /* TODO: MMI_OPC_1_PSUBUW */
27335 case MMI_OPC_1_PEXTUW: /* TODO: MMI_OPC_1_PEXTUW */
27336 case MMI_OPC_1_PADDUH: /* TODO: MMI_OPC_1_PADDUH */
27337 case MMI_OPC_1_PSUBUH: /* TODO: MMI_OPC_1_PSUBUH */
27338 case MMI_OPC_1_PEXTUH: /* TODO: MMI_OPC_1_PEXTUH */
27339 case MMI_OPC_1_PADDUB: /* TODO: MMI_OPC_1_PADDUB */
27340 case MMI_OPC_1_PSUBUB: /* TODO: MMI_OPC_1_PSUBUB */
27341 case MMI_OPC_1_PEXTUB: /* TODO: MMI_OPC_1_PEXTUB */
27342 case MMI_OPC_1_QFSRV: /* TODO: MMI_OPC_1_QFSRV */
27343 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_CLASS_MMI1 */
27346 MIPS_INVAL("TX79 MMI class MMI1");
27347 generate_exception_end(ctx, EXCP_RI);
27352 static void decode_mmi2(CPUMIPSState *env, DisasContext *ctx)
27354 uint32_t opc = MASK_MMI2(ctx->opcode);
27357 case MMI_OPC_2_PMADDW: /* TODO: MMI_OPC_2_PMADDW */
27358 case MMI_OPC_2_PSLLVW: /* TODO: MMI_OPC_2_PSLLVW */
27359 case MMI_OPC_2_PSRLVW: /* TODO: MMI_OPC_2_PSRLVW */
27360 case MMI_OPC_2_PMSUBW: /* TODO: MMI_OPC_2_PMSUBW */
27361 case MMI_OPC_2_PMFHI: /* TODO: MMI_OPC_2_PMFHI */
27362 case MMI_OPC_2_PMFLO: /* TODO: MMI_OPC_2_PMFLO */
27363 case MMI_OPC_2_PINTH: /* TODO: MMI_OPC_2_PINTH */
27364 case MMI_OPC_2_PMULTW: /* TODO: MMI_OPC_2_PMULTW */
27365 case MMI_OPC_2_PDIVW: /* TODO: MMI_OPC_2_PDIVW */
27366 case MMI_OPC_2_PCPYLD: /* TODO: MMI_OPC_2_PCPYLD */
27367 case MMI_OPC_2_PMADDH: /* TODO: MMI_OPC_2_PMADDH */
27368 case MMI_OPC_2_PHMADH: /* TODO: MMI_OPC_2_PHMADH */
27369 case MMI_OPC_2_PAND: /* TODO: MMI_OPC_2_PAND */
27370 case MMI_OPC_2_PXOR: /* TODO: MMI_OPC_2_PXOR */
27371 case MMI_OPC_2_PMSUBH: /* TODO: MMI_OPC_2_PMSUBH */
27372 case MMI_OPC_2_PHMSBH: /* TODO: MMI_OPC_2_PHMSBH */
27373 case MMI_OPC_2_PEXEH: /* TODO: MMI_OPC_2_PEXEH */
27374 case MMI_OPC_2_PREVH: /* TODO: MMI_OPC_2_PREVH */
27375 case MMI_OPC_2_PMULTH: /* TODO: MMI_OPC_2_PMULTH */
27376 case MMI_OPC_2_PDIVBW: /* TODO: MMI_OPC_2_PDIVBW */
27377 case MMI_OPC_2_PEXEW: /* TODO: MMI_OPC_2_PEXEW */
27378 case MMI_OPC_2_PROT3W: /* TODO: MMI_OPC_2_PROT3W */
27379 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_CLASS_MMI2 */
27382 MIPS_INVAL("TX79 MMI class MMI2");
27383 generate_exception_end(ctx, EXCP_RI);
27388 static void decode_mmi3(CPUMIPSState *env, DisasContext *ctx)
27390 uint32_t opc = MASK_MMI3(ctx->opcode);
27393 case MMI_OPC_3_PMADDUW: /* TODO: MMI_OPC_3_PMADDUW */
27394 case MMI_OPC_3_PSRAVW: /* TODO: MMI_OPC_3_PSRAVW */
27395 case MMI_OPC_3_PMTHI: /* TODO: MMI_OPC_3_PMTHI */
27396 case MMI_OPC_3_PMTLO: /* TODO: MMI_OPC_3_PMTLO */
27397 case MMI_OPC_3_PINTEH: /* TODO: MMI_OPC_3_PINTEH */
27398 case MMI_OPC_3_PMULTUW: /* TODO: MMI_OPC_3_PMULTUW */
27399 case MMI_OPC_3_PDIVUW: /* TODO: MMI_OPC_3_PDIVUW */
27400 case MMI_OPC_3_PCPYUD: /* TODO: MMI_OPC_3_PCPYUD */
27401 case MMI_OPC_3_POR: /* TODO: MMI_OPC_3_POR */
27402 case MMI_OPC_3_PNOR: /* TODO: MMI_OPC_3_PNOR */
27403 case MMI_OPC_3_PEXCH: /* TODO: MMI_OPC_3_PEXCH */
27404 case MMI_OPC_3_PCPYH: /* TODO: MMI_OPC_3_PCPYH */
27405 case MMI_OPC_3_PEXCW: /* TODO: MMI_OPC_3_PEXCW */
27406 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_CLASS_MMI3 */
27409 MIPS_INVAL("TX79 MMI class MMI3");
27410 generate_exception_end(ctx, EXCP_RI);
27415 static void decode_mmi(CPUMIPSState *env, DisasContext *ctx)
27417 uint32_t opc = MASK_MMI(ctx->opcode);
27418 int rs = extract32(ctx->opcode, 21, 5);
27419 int rt = extract32(ctx->opcode, 16, 5);
27420 int rd = extract32(ctx->opcode, 11, 5);
27423 case MMI_OPC_CLASS_MMI0:
27424 decode_mmi0(env, ctx);
27426 case MMI_OPC_CLASS_MMI1:
27427 decode_mmi1(env, ctx);
27429 case MMI_OPC_CLASS_MMI2:
27430 decode_mmi2(env, ctx);
27432 case MMI_OPC_CLASS_MMI3:
27433 decode_mmi3(env, ctx);
27435 case MMI_OPC_MULT1:
27436 case MMI_OPC_MULTU1:
27438 case MMI_OPC_MADDU:
27439 case MMI_OPC_MADD1:
27440 case MMI_OPC_MADDU1:
27441 gen_mul_txx9(ctx, opc, rd, rs, rt);
27444 case MMI_OPC_DIVU1:
27445 gen_div1_tx79(ctx, opc, rs, rt);
27447 case MMI_OPC_MTLO1:
27448 case MMI_OPC_MTHI1:
27449 gen_HILO1_tx79(ctx, opc, rs);
27451 case MMI_OPC_MFLO1:
27452 case MMI_OPC_MFHI1:
27453 gen_HILO1_tx79(ctx, opc, rd);
27455 case MMI_OPC_PLZCW: /* TODO: MMI_OPC_PLZCW */
27456 case MMI_OPC_PMFHL: /* TODO: MMI_OPC_PMFHL */
27457 case MMI_OPC_PMTHL: /* TODO: MMI_OPC_PMTHL */
27458 case MMI_OPC_PSLLH: /* TODO: MMI_OPC_PSLLH */
27459 case MMI_OPC_PSRLH: /* TODO: MMI_OPC_PSRLH */
27460 case MMI_OPC_PSRAH: /* TODO: MMI_OPC_PSRAH */
27461 case MMI_OPC_PSLLW: /* TODO: MMI_OPC_PSLLW */
27462 case MMI_OPC_PSRLW: /* TODO: MMI_OPC_PSRLW */
27463 case MMI_OPC_PSRAW: /* TODO: MMI_OPC_PSRAW */
27464 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_CLASS_MMI */
27467 MIPS_INVAL("TX79 MMI class");
27468 generate_exception_end(ctx, EXCP_RI);
27473 static void gen_mmi_lq(CPUMIPSState *env, DisasContext *ctx)
27475 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_LQ */
27478 static void gen_mmi_sq(DisasContext *ctx, int base, int rt, int offset)
27480 generate_exception_end(ctx, EXCP_RI); /* TODO: MMI_OPC_SQ */
27484 * The TX79-specific instruction Store Quadword
27486 * +--------+-------+-------+------------------------+
27487 * | 011111 | base | rt | offset | SQ
27488 * +--------+-------+-------+------------------------+
27491 * has the same opcode as the Read Hardware Register instruction
27493 * +--------+-------+-------+-------+-------+--------+
27494 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27495 * +--------+-------+-------+-------+-------+--------+
27498 * that is required, trapped and emulated by the Linux kernel. However, all
27499 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27500 * offset is odd. Therefore all valid SQ instructions can execute normally.
27501 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27502 * between SQ and RDHWR, as the Linux kernel does.
27504 static void decode_mmi_sq(CPUMIPSState *env, DisasContext *ctx)
27506 int base = extract32(ctx->opcode, 21, 5);
27507 int rt = extract32(ctx->opcode, 16, 5);
27508 int offset = extract32(ctx->opcode, 0, 16);
27510 #ifdef CONFIG_USER_ONLY
27511 uint32_t op1 = MASK_SPECIAL3(ctx->opcode);
27512 uint32_t op2 = extract32(ctx->opcode, 6, 5);
27514 if (base == 0 && op2 == 0 && op1 == OPC_RDHWR) {
27515 int rd = extract32(ctx->opcode, 11, 5);
27517 gen_rdhwr(ctx, rt, rd, 0);
27522 gen_mmi_sq(ctx, base, rt, offset);
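/*
 * In the CONFIG_USER_ONLY path above, an encoding with base == 0, zero
 * bits [10:6] and the RDHWR function code is routed to gen_rdhwr(), while
 * every other encoding is treated as SQ. This is the same discrimination
 * between SQ and RDHWR that the comment before this function attributes to
 * the Linux kernel.
 */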
27527 static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
27529 int rs, rt, rd, sa;
27533 rs = (ctx->opcode >> 21) & 0x1f;
27534 rt = (ctx->opcode >> 16) & 0x1f;
27535 rd = (ctx->opcode >> 11) & 0x1f;
27536 sa = (ctx->opcode >> 6) & 0x1f;
27537 imm = sextract32(ctx->opcode, 7, 9);
27539 op1 = MASK_SPECIAL3(ctx->opcode);
27542 * EVA loads and stores overlap Loongson 2E instructions decoded by
27543 * decode_opc_special3_legacy(), so be careful to allow their decoding when EVA is absent.
27550 check_insn_opc_removed(ctx, ISA_MIPS32R6);
27558 check_cp0_enabled(ctx);
27559 gen_ld(ctx, op1, rt, rs, imm);
27563 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27568 check_cp0_enabled(ctx
);
27569 gen_st(ctx
, op1
, rt
, rs
, imm
);
27572 check_cp0_enabled(ctx
);
27573 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
27576 check_cp0_enabled(ctx
);
27577 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27578 gen_cache_operation(ctx
, rt
, rs
, imm
);
27580 /* Treat as NOP. */
27583 check_cp0_enabled(ctx
);
27584 /* Treat as NOP. */
27592 check_insn(ctx
, ISA_MIPS32R2
);
27593 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27596 op2
= MASK_BSHFL(ctx
->opcode
);
27603 check_insn(ctx
, ISA_MIPS32R6
);
27604 decode_opc_special3_r6(env
, ctx
);
27607 check_insn(ctx
, ISA_MIPS32R2
);
27608 gen_bshfl(ctx
, op2
, rt
, rd
);
27612 #if defined(TARGET_MIPS64)
27619 check_insn(ctx
, ISA_MIPS64R2
);
27620 check_mips_64(ctx
);
27621 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27624 op2
= MASK_DBSHFL(ctx
->opcode
);
27635 check_insn(ctx
, ISA_MIPS32R6
);
27636 decode_opc_special3_r6(env
, ctx
);
27639 check_insn(ctx
, ISA_MIPS64R2
);
27640 check_mips_64(ctx
);
27641 op2
= MASK_DBSHFL(ctx
->opcode
);
27642 gen_bshfl(ctx
, op2
, rt
, rd
);
27648 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27653 TCGv t0
= tcg_temp_new();
27654 TCGv t1
= tcg_temp_new();
27656 gen_load_gpr(t0
, rt
);
27657 gen_load_gpr(t1
, rs
);
27658 gen_helper_fork(t0
, t1
);
27666 TCGv t0
= tcg_temp_new();
27668 gen_load_gpr(t0
, rs
);
27669 gen_helper_yield(t0
, cpu_env
, t0
);
27670 gen_store_gpr(t0
, rd
);
27675 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27676 decode_opc_special3_r6(env
, ctx
);
27678 decode_opc_special3_legacy(env
, ctx
);
27683 /* MIPS SIMD Architecture (MSA) */
27684 static inline int check_msa_access(DisasContext *ctx)
27686 if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
27687 !(ctx->hflags & MIPS_HFLAG_F64))) {
27688 generate_exception_end(ctx, EXCP_RI);
27692 if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
27693 if (ctx->insn_flags & ASE_MSA) {
27694 generate_exception_end(ctx, EXCP_MSADIS);
27697 generate_exception_end(ctx, EXCP_RI);
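/*
 * Summary of the checks above: if the FPU is enabled it must be in 64-bit
 * mode, otherwise the MSA instruction is reserved; and when the MSA unit is
 * not enabled, a CPU that implements ASE_MSA raises the MSA-disabled
 * exception while one that does not raises a reserved instruction exception.
 */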
27704 static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
27706 /* generates tcg ops to check if any element is 0 */
27707 /* Note this function only works with MSA_WRLEN = 128 */
27708 uint64_t eval_zero_or_big = 0;
27709 uint64_t eval_big = 0;
27710 TCGv_i64 t0 = tcg_temp_new_i64();
27711 TCGv_i64 t1 = tcg_temp_new_i64();
27714 eval_zero_or_big = 0x0101010101010101ULL;
27715 eval_big = 0x8080808080808080ULL;
27718 eval_zero_or_big = 0x0001000100010001ULL;
27719 eval_big = 0x8000800080008000ULL;
27722 eval_zero_or_big = 0x0000000100000001ULL;
27723 eval_big = 0x8000000080000000ULL;
27726 eval_zero_or_big = 0x0000000000000001ULL;
27727 eval_big = 0x8000000000000000ULL;
27730 tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
27731 tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
27732 tcg_gen_andi_i64(t0, t0, eval_big);
27733 tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
27734 tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
27735 tcg_gen_andi_i64(t1, t1, eval_big);
27736 tcg_gen_or_i64(t0, t0, t1);
27737 /* if all bits are zero then all elements are not zero */
27738 /* if some bit is non-zero then some element is zero */
27739 tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
27740 tcg_gen_trunc_i64_tl(tresult, t0);
27741 tcg_temp_free_i64(t0);
27742 tcg_temp_free_i64(t1);
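/*
 * The subtract/andc/and sequence above is the classic "contains a zero
 * element" bit trick applied to each 64-bit half of the 128-bit vector
 * register: (x - 0x01...01) & ~x & 0x80...80 is non-zero when some element
 * of x, at the granularity selected by df, is zero. For example, with
 * df == DF_BYTE and x == 0x1122003344556677, the zero byte makes the
 * expression non-zero, so tresult ends up set to 1.
 */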
27745 static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
27747 uint8_t df = (ctx->opcode >> 21) & 0x3;
27748 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
27749 int64_t s16 = (int16_t)ctx->opcode;
27751 check_msa_access(ctx);
27753 if (ctx->hflags & MIPS_HFLAG_BMASK) {
27754 generate_exception_end(ctx, EXCP_RI);
27761 TCGv_i64 t0 = tcg_temp_new_i64();
27762 tcg_gen_or_i64(t0, msa_wr_d[wt << 1], msa_wr_d[(wt << 1) + 1]);
27763 tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
27764 TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
27765 tcg_gen_trunc_i64_tl(bcond, t0);
27766 tcg_temp_free_i64(t0);
27773 gen_check_zero_element(bcond, df, wt);
27779 gen_check_zero_element(bcond, df, wt);
27780 tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
27784 ctx->btarget = ctx->base.pc_next + (s16 << 2) + 4;
27786 ctx->hflags |= MIPS_HFLAG_BC;
27787 ctx->hflags |= MIPS_HFLAG_BDS32;
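/*
 * The target computed above follows the usual MIPS branch rule: a signed
 * 16-bit offset in instruction words, applied relative to the address of
 * the delay-slot instruction (hence the "+ 4"). MIPS_HFLAG_BC marks a
 * conditional branch taken when bcond is non-zero and MIPS_HFLAG_BDS32 a
 * 32-bit delay slot; the common delay-slot handling elsewhere in this file
 * then emits the actual compare and jump.
 */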
27790 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
27792 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
27793 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
27794 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27795 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27797 TCGv_i32 twd
= tcg_const_i32(wd
);
27798 TCGv_i32 tws
= tcg_const_i32(ws
);
27799 TCGv_i32 ti8
= tcg_const_i32(i8
);
27801 switch (MASK_MSA_I8(ctx
->opcode
)) {
27803 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
27806 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
27809 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
27812 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
27815 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
27818 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
27821 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
27827 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
27828 if (df
== DF_DOUBLE
) {
27829 generate_exception_end(ctx
, EXCP_RI
);
27831 TCGv_i32 tdf
= tcg_const_i32(df
);
27832 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
27833 tcg_temp_free_i32(tdf
);
27838 MIPS_INVAL("MSA instruction");
27839 generate_exception_end(ctx
, EXCP_RI
);
27843 tcg_temp_free_i32(twd
);
27844 tcg_temp_free_i32(tws
);
27845 tcg_temp_free_i32(ti8
);
27848 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
27850 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27851 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27852 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
27853 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
27854 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27855 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27857 TCGv_i32 tdf
= tcg_const_i32(df
);
27858 TCGv_i32 twd
= tcg_const_i32(wd
);
27859 TCGv_i32 tws
= tcg_const_i32(ws
);
27860 TCGv_i32 timm
= tcg_temp_new_i32();
27861 tcg_gen_movi_i32(timm
, u5
);
27863 switch (MASK_MSA_I5(ctx
->opcode
)) {
27865 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27868 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27870 case OPC_MAXI_S_df
:
27871 tcg_gen_movi_i32(timm
, s5
);
27872 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27874 case OPC_MAXI_U_df
:
27875 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27877 case OPC_MINI_S_df
:
27878 tcg_gen_movi_i32(timm
, s5
);
27879 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27881 case OPC_MINI_U_df
:
27882 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27885 tcg_gen_movi_i32(timm
, s5
);
27886 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27888 case OPC_CLTI_S_df
:
27889 tcg_gen_movi_i32(timm
, s5
);
27890 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27892 case OPC_CLTI_U_df
:
27893 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27895 case OPC_CLEI_S_df
:
27896 tcg_gen_movi_i32(timm
, s5
);
27897 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27899 case OPC_CLEI_U_df
:
27900 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27904 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
27905 tcg_gen_movi_i32(timm
, s10
);
27906 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
27910 MIPS_INVAL("MSA instruction");
27911 generate_exception_end(ctx
, EXCP_RI
);
27915 tcg_temp_free_i32(tdf
);
27916 tcg_temp_free_i32(twd
);
27917 tcg_temp_free_i32(tws
);
27918 tcg_temp_free_i32(timm
);
27921 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
27923 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27924 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
27925 uint32_t df
= 0, m
= 0;
27926 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27927 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27934 if ((dfm
& 0x40) == 0x00) {
27937 } else if ((dfm
& 0x60) == 0x40) {
27940 } else if ((dfm
& 0x70) == 0x60) {
27943 } else if ((dfm
& 0x78) == 0x70) {
27947 generate_exception_end(ctx
, EXCP_RI
);
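/*
 * The ladder above decodes the MSA BIT-format df/m field: the run of
 * leading one bits selects the element size and the remaining low bits hold
 * the bit index m, i.e. 0b0mmmmmm doubleword, 0b10mmmmm word, 0b110mmmm
 * halfword, 0b1110mmm byte; any other pattern is reserved and raises RI.
 */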
27951 tdf
= tcg_const_i32(df
);
27952 tm
= tcg_const_i32(m
);
27953 twd
= tcg_const_i32(wd
);
27954 tws
= tcg_const_i32(ws
);
27956 switch (MASK_MSA_BIT(ctx
->opcode
)) {
27958 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27961 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
27964 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27967 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27970 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
27973 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
27975 case OPC_BINSLI_df
:
27976 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27978 case OPC_BINSRI_df
:
27979 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27982 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
27985 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
27988 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
27991 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27994 MIPS_INVAL("MSA instruction");
27995 generate_exception_end(ctx
, EXCP_RI
);
27999 tcg_temp_free_i32(tdf
);
28000 tcg_temp_free_i32(tm
);
28001 tcg_temp_free_i32(twd
);
28002 tcg_temp_free_i32(tws
);
28005 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28007 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28008 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28009 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28010 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28011 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28013 TCGv_i32 tdf
= tcg_const_i32(df
);
28014 TCGv_i32 twd
= tcg_const_i32(wd
);
28015 TCGv_i32 tws
= tcg_const_i32(ws
);
28016 TCGv_i32 twt
= tcg_const_i32(wt
);
28018 switch (MASK_MSA_3R(ctx
->opcode
)) {
28020 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28023 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28026 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28029 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28031 case OPC_SUBS_S_df
:
28032 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28035 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28038 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28041 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28044 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28047 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28049 case OPC_ADDS_A_df
:
28050 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28052 case OPC_SUBS_U_df
:
28053 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28056 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28059 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28062 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28065 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28068 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28071 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28073 case OPC_ADDS_S_df
:
28074 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28076 case OPC_SUBSUS_U_df
:
28077 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28080 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28083 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28086 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28089 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28092 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28095 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28097 case OPC_ADDS_U_df
:
28098 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28100 case OPC_SUBSUU_S_df
:
28101 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28104 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28107 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28110 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28113 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28116 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28118 case OPC_ASUB_S_df
:
28119 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28122 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28125 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28128 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28131 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28134 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28137 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28139 case OPC_ASUB_U_df
:
28140 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28143 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28146 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28149 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28152 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28154 case OPC_AVER_S_df
:
28155 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28158 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28161 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28164 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28167 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28169 case OPC_AVER_U_df
:
28170 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28173 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28176 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28179 case OPC_DOTP_S_df
:
28180 case OPC_DOTP_U_df
:
28181 case OPC_DPADD_S_df
:
28182 case OPC_DPADD_U_df
:
28183 case OPC_DPSUB_S_df
:
28184 case OPC_HADD_S_df
:
28185 case OPC_DPSUB_U_df
:
28186 case OPC_HADD_U_df
:
28187 case OPC_HSUB_S_df
:
28188 case OPC_HSUB_U_df
:
28189 if (df
== DF_BYTE
) {
28190 generate_exception_end(ctx
, EXCP_RI
);
28193 switch (MASK_MSA_3R(ctx
->opcode
)) {
28194 case OPC_DOTP_S_df
:
28195 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28197 case OPC_DOTP_U_df
:
28198 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28200 case OPC_DPADD_S_df
:
28201 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28203 case OPC_DPADD_U_df
:
28204 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28206 case OPC_DPSUB_S_df
:
28207 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28209 case OPC_HADD_S_df
:
28210 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28212 case OPC_DPSUB_U_df
:
28213 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28215 case OPC_HADD_U_df
:
28216 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28218 case OPC_HSUB_S_df
:
28219 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28221 case OPC_HSUB_U_df
:
28222 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28227 MIPS_INVAL("MSA instruction");
28228 generate_exception_end(ctx
, EXCP_RI
);
28231 tcg_temp_free_i32(twd
);
28232 tcg_temp_free_i32(tws
);
28233 tcg_temp_free_i32(twt
);
28234 tcg_temp_free_i32(tdf
);
28237 static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
28239 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28240 uint8_t source = (ctx->opcode >> 11) & 0x1f;
28241 uint8_t dest = (ctx->opcode >> 6) & 0x1f;
28242 TCGv telm = tcg_temp_new();
28243 TCGv_i32 tsr = tcg_const_i32(source);
28244 TCGv_i32 tdt = tcg_const_i32(dest);
28246 switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
28248 gen_load_gpr(telm, source);
28249 gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
28252 gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
28253 gen_store_gpr(telm, dest);
28256 gen_helper_msa_move_v(cpu_env, tdt, tsr);
28259 MIPS_INVAL("MSA instruction");
28260 generate_exception_end(ctx, EXCP_RI);
28264 tcg_temp_free(telm);
28265 tcg_temp_free_i32(tdt);
28266 tcg_temp_free_i32(tsr);
28269 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28272 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28273 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28274 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28276 TCGv_i32 tws
= tcg_const_i32(ws
);
28277 TCGv_i32 twd
= tcg_const_i32(wd
);
28278 TCGv_i32 tn
= tcg_const_i32(n
);
28279 TCGv_i32 tdf
= tcg_const_i32(df
);
28281 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28283 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
28285 case OPC_SPLATI_df
:
28286 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
28289 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
28291 case OPC_COPY_S_df
:
28292 case OPC_COPY_U_df
:
28293 case OPC_INSERT_df
:
28294 #if !defined(TARGET_MIPS64)
28295 /* Double format valid only for MIPS64 */
28296 if (df
== DF_DOUBLE
) {
28297 generate_exception_end(ctx
, EXCP_RI
);
28301 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28302 case OPC_COPY_S_df
:
28303 if (likely(wd
!= 0)) {
28304 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
28307 case OPC_COPY_U_df
:
28308 if (likely(wd
!= 0)) {
28309 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
28312 case OPC_INSERT_df
:
28313 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
28318 MIPS_INVAL("MSA instruction");
28319 generate_exception_end(ctx
, EXCP_RI
);
28321 tcg_temp_free_i32(twd
);
28322 tcg_temp_free_i32(tws
);
28323 tcg_temp_free_i32(tn
);
28324 tcg_temp_free_i32(tdf
);
28327 static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
28329 uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
28330 uint32_t df = 0, n = 0;
28332 if ((dfn & 0x30) == 0x00) {
28335 } else if ((dfn & 0x38) == 0x20) {
28338 } else if ((dfn & 0x3c) == 0x30) {
28341 } else if ((dfn & 0x3e) == 0x38) {
28344 } else if (dfn == 0x3E) {
28345 /* CTCMSA, CFCMSA, MOVE.V */
28346 gen_msa_elm_3e(env, ctx);
28349 generate_exception_end(ctx, EXCP_RI);
28353 gen_msa_elm_df(env, ctx, df, n);
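/*
 * The dfn ladder above decodes the MSA ELM-format df/n field: the element
 * index n sits in the low bits and the leading one bits select the element
 * size, i.e. 0b00nnnn byte, 0b100nnn halfword, 0b1100nn word, 0b11100n
 * doubleword, with 0b111110 reserved for the CTCMSA/CFCMSA/MOVE.V group and
 * everything else treated as a reserved instruction.
 */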
28356 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28358 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28359 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28360 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28361 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28362 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28364 TCGv_i32 twd
= tcg_const_i32(wd
);
28365 TCGv_i32 tws
= tcg_const_i32(ws
);
28366 TCGv_i32 twt
= tcg_const_i32(wt
);
28367 TCGv_i32 tdf
= tcg_temp_new_i32();
28369 /* adjust df value for floating-point instruction */
28370 tcg_gen_movi_i32(tdf
, df
+ 2);
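/*
 * In the 3RF format df is a single bit (0 = word, 1 = doubleword) while the
 * MSA helpers expect the full data format code, hence df + 2 here
 * (DF_WORD/DF_DOUBLE). The fixed-point Q-format cases below override this
 * with df + 1, since they operate on halfword/word elements.
 */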
28372 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28374 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28377 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28380 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28383 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28386 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28389 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28392 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28395 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28398 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28401 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28404 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28407 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28410 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28413 tcg_gen_movi_i32(tdf
, df
+ 1);
28414 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28417 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28420 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28422 case OPC_MADD_Q_df
:
28423 tcg_gen_movi_i32(tdf
, df
+ 1);
28424 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28427 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28429 case OPC_MSUB_Q_df
:
28430 tcg_gen_movi_i32(tdf
, df
+ 1);
28431 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28434 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28437 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28440 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28443 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28446 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28449 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28452 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28455 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28458 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28461 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28464 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28467 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28470 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28472 case OPC_MULR_Q_df
:
28473 tcg_gen_movi_i32(tdf
, df
+ 1);
28474 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28477 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28479 case OPC_FMIN_A_df
:
28480 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28482 case OPC_MADDR_Q_df
:
28483 tcg_gen_movi_i32(tdf
, df
+ 1);
28484 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28487 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28490 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28492 case OPC_MSUBR_Q_df
:
28493 tcg_gen_movi_i32(tdf
, df
+ 1);
28494 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28497 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28499 case OPC_FMAX_A_df
:
28500 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28503 MIPS_INVAL("MSA instruction");
28504 generate_exception_end(ctx
, EXCP_RI
);
28508 tcg_temp_free_i32(twd
);
28509 tcg_temp_free_i32(tws
);
28510 tcg_temp_free_i32(twt
);
28511 tcg_temp_free_i32(tdf
);
28514 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28516 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28517 (op & (0x7 << 18)))
28518 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28519 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28520 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28521 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28522 TCGv_i32 twd
= tcg_const_i32(wd
);
28523 TCGv_i32 tws
= tcg_const_i32(ws
);
28524 TCGv_i32 twt
= tcg_const_i32(wt
);
28525 TCGv_i32 tdf
= tcg_const_i32(df
);
28527 switch (MASK_MSA_2R(ctx
->opcode
)) {
28529 #if !defined(TARGET_MIPS64)
28530 /* Double format valid only for MIPS64 */
28531 if (df
== DF_DOUBLE
) {
28532 generate_exception_end(ctx
, EXCP_RI
);
28536 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28539 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28542 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28545 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28548 MIPS_INVAL("MSA instruction");
28549 generate_exception_end(ctx
, EXCP_RI
);
28553 tcg_temp_free_i32(twd
);
28554 tcg_temp_free_i32(tws
);
28555 tcg_temp_free_i32(twt
);
28556 tcg_temp_free_i32(tdf
);
static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                          (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
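/*
 * gen_msa_vec_v covers the MSA VEC format, i.e. whole-register bitwise
 * operations (AND.V, OR.V, NOR.V, XOR.V, BMNZ.V, BMZ.V, BSEL.V); no data
 * format field is involved, so only wd/ws/wt are passed to the helpers.
 */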
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_msa_vec_v(env, ctx);
        gen_msa_2r(env, ctx);
        gen_msa_2rf(env, ctx);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
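/*
 * gen_msa is the top-level MSA decoder: it checks that the MSA ASE is
 * present and accessible, then dispatches on the minor opcode to the I8,
 * I5, BIT, 3R, ELM, 3RF and VEC/2R/2RF sub-decoders, and handles the MSA
 * vector loads and stores inline.
 */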
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        gen_msa_elm(env, ctx);
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        gen_msa_vec(env, ctx);
        int32_t s10 = sextract32(ctx->opcode, 16, 10);
        uint8_t rs = (ctx->opcode >> 11) & 0x1f;
        uint8_t wd = (ctx->opcode >> 6) & 0x1f;
        uint8_t df = (ctx->opcode >> 0) & 0x3;

        TCGv_i32 twd = tcg_const_i32(wd);
        TCGv taddr = tcg_temp_new();
        gen_base_offset_addr(ctx, taddr, rs, s10 << df);

        switch (MASK_MSA_MINOR(opcode)) {
            gen_helper_msa_ld_b(cpu_env, twd, taddr);
            gen_helper_msa_ld_h(cpu_env, twd, taddr);
            gen_helper_msa_ld_w(cpu_env, twd, taddr);
            gen_helper_msa_ld_d(cpu_env, twd, taddr);
            gen_helper_msa_st_b(cpu_env, twd, taddr);
            gen_helper_msa_st_h(cpu_env, twd, taddr);
            gen_helper_msa_st_w(cpu_env, twd, taddr);
            gen_helper_msa_st_d(cpu_env, twd, taddr);

        tcg_temp_free_i32(twd);
        tcg_temp_free(taddr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
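/*
 * decode_opc is the main decoder for 32-bit MIPS instructions: it checks
 * that the PC is word aligned, handles the not-taken path of "branch
 * likely" instructions, then dispatches on the major opcode returned by
 * MASK_OP_MAJOR().
 */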
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    /* make sure instructions are on a word boundary */
    if (ctx->base.pc_next & 0x3) {
        env->CP0_BadVAddr = ctx->base.pc_next;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->base.pc_next + 4);

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
        decode_opc_special(env, ctx);
#if defined(TARGET_MIPS64)
        if ((ctx->insn_flags & INSN_R5900) && (ctx->insn_flags & ASE_MMI)) {
            decode_mmi(env, ctx);
        if (ctx->insn_flags & ASE_MXU) {
            decode_opc_mxu(env, ctx);
        decode_opc_special2_legacy(env, ctx);
#if defined(TARGET_MIPS64)
        if (ctx->insn_flags & INSN_R5900) {
            decode_mmi_sq(env, ctx);    /* MMI_OPC_SQ */
            decode_opc_special3(env, ctx);
        decode_opc_special3(env, ctx);
        op1 = MASK_REGIMM(ctx->opcode);
        case OPC_BLTZL: /* REGIMM branches */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                    generate_exception_end(ctx, EXCP_RI);
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
        case OPC_TGEI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions */
            ctx->base.is_jmp = DISAS_STOP;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
        default:            /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            TCGv t0 = tcg_temp_new();

            op2 = MASK_MFMC0(ctx->opcode);
                gen_helper_dmt(t0);
                gen_store_gpr(t0, rt);
                gen_helper_emt(t0);
                gen_store_gpr(t0, rt);
                gen_helper_dvpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                gen_helper_evpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                    gen_helper_dvp(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                    gen_helper_evp(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_di(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->base.is_jmp = DISAS_STOP;
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_ei(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* DISAS_STOP isn't sufficient, we need to ensure we break
                   out of translated code to check for pending interrupts */
                gen_save_pc(ctx->base.pc_next + 4);
                ctx->base.is_jmp = DISAS_EXIT;
            default:            /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception_end(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        gen_st_cond(ctx, rt, rs, imm, MO_TESL, false);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->insn_flags & INSN_R5900) {
            /* Treat as NOP. */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */

    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);

        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);

    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        if (ctx->insn_flags & INSN_R5900) {
            check_insn_opc_user_only(ctx, INSN_R5900);
        check_mips_64(ctx);
        gen_st_cond(ctx, rt, rs, imm, MO_TEQ, false);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        if (ctx->insn_flags & INSN_R5900) {
#if defined(TARGET_MIPS64)
            gen_mmi_lq(env, ctx);    /* MMI_OPC_LQ */
            /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->base.pc_next, rs);
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
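/*
 * The functions below implement the TranslatorOps hooks used by the
 * generic translator_loop(); mips_tr_init_disas_context caches the CP0
 * configuration bits the decoder needs into the DisasContext.
 */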
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config2 = env->CP0_Config2;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be properly
       cleared -- thus we increment the PC here so that the logic
       setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
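/*
 * mips_tr_translate_insn fetches and decodes one instruction: nanoMIPS,
 * microMIPS and MIPS16 fetch 16-bit units, classic MIPS fetches 32 bits,
 * and a pending branch is emitted once its delay (or forbidden) slot has
 * been translated.
 */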
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /* force to generate branch as there is neither delay nor
               forbidden slot */
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /* Force to generate branch as microMIPS R6 doesn't restrict
               branches in the forbidden slot. */
        gen_branch(ctx, insn_bytes);
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {

    /* Execute a branch and its delay slot as a single instruction.
       This is what GDB expects and is consistent with what the
       hardware does (e.g. if a delay slot instruction faults, the
       reported PC is the PC of the branch). */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx->base.is_jmp) {
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
        case DISAS_TOO_MANY:
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            tcg_gen_exit_tb(NULL, 0);
        case DISAS_NORETURN:
            g_assert_not_reached();
static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
    translator_loop(&mips_tr_ops, &ctx.base, cs, tb, max_insns);
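/*
 * The two dump routines below print the FPU and general-purpose register
 * state; fpu_dump_state is only invoked from mips_cpu_dump_state when the
 * FPU dump flag is set and the FPU is enabled in hflags.
 */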
static void fpu_dump_state(CPUMIPSState *env, FILE *f, int flags)
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                 \
        qemu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                     " fd:%13g fs:%13g psu: %13g\n",                 \
                     (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                     (double)(fp)->fd,                               \
                     (double)(fp)->fs[FP_ENDIAN_IDX],                \
                     (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];               \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];        \
        qemu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                     " fd:%13g fs:%13g psu:%13g\n",                  \
                     tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                     (double)tmp.fs[FP_ENDIAN_IDX],                  \
                     (double)tmp.fs[!FP_ENDIAN_IDX]);                \

                 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                 get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        qemu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, int flags)
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    qemu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                 " LO=0x" TARGET_FMT_lx " ds %04x "
                 TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                 env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        qemu_fprintf(f, "GPR%02d:", i);
        qemu_fprintf(f, " %s " TARGET_FMT_lx,
                     regnames[i], env->active_tc.gpr[i]);
        qemu_fprintf(f, "\n");

    qemu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    qemu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
    qemu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                 env->CP0_Config2, env->CP0_Config3);
    qemu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                 env->CP0_Config4, env->CP0_Config5);
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, flags);
void mips_tcg_init(void)
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),

    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
    cpu_lladdr = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, lladdr),
    cpu_llval = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, llval),

#if defined(TARGET_MIPS64)
    for (i = 1; i < 32; i++) {
        cpu_mmr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUMIPSState,

#if !defined(TARGET_MIPS64)
    for (i = 0; i < NUMBER_OF_MXU_REGISTERS - 1; i++) {
        mxu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.mxu_gpr[i]),

    mxu_CR = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.mxu_cr),
                                mxuregnames[NUMBER_OF_MXU_REGISTERS - 1]);

#include "translate_init.inc.c"
void cpu_mips_realize_env(CPUMIPSState *env)
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
bool cpu_supports_cps_smp(const char *cpu_type)
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
bool cpu_supports_isa(const char *cpu_type, uint64_t isa)
    const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
    return (mcc->cpu_def->insn_flags & isa) != 0;
void cpu_set_exception_base(int vp_index, target_ulong address)
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
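/*
 * cpu_state_reset loads the CPU model's reset values into the CP0 and FPU
 * registers. The CONFIG_USER_ONLY path fakes just enough privileged state
 * for user-mode emulation; the system path additionally initialises EBase,
 * the TLB, watchpoints, MT thread state and the default legacy
 * segmentation control.
 */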
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (mips_um_ksegs_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= (int32_t)0x80000000;
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    /*
     * Configure default legacy segmentation control. We use this regardless of
     * whether segmentation control is presented to the guest.
     */
    /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
    env->CP0_SegCtl0 = (CP0SC_AM_MK << CP0SC_AM);
    /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
    env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
    /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
    env->CP0_SegCtl1 = (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
    /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
    env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
                         (3 << CP0SC_C)) << 16;
    /* USeg (seg4 0x40000000..0x7FFFFFFF) */
    env->CP0_SegCtl2 = (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                       (1 << CP0SC_EU) | (2 << CP0SC_C);
    /* USeg (seg5 0x00000000..0x3FFFFFFF) */
    env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
                         (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
    /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
    env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);

    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);

    if (env->insn_flags & ISA_MIPS32R6) {
        env->CP0_PWSize = 0x40;
        env->CP0_PWField = 0x0C30C302;
        env->CP0_PWField = 0x02;

    if (env->CP0_Config3 & (1 << CP0C3_ISA) & (1 << (CP0C3_ISA + 1))) {
        /* microMIPS on reset when Config3.ISA is 3 */
        env->hflags |= MIPS_HFLAG_M16;

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
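/*
 * restore_state_to_opc rebuilds PC, the branch-state hflags and the
 * branch target from the values recorded at tcg_gen_insn_start() time
 * when an exception unwinds into the middle of a TB.
 */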
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data
[2];