2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
26 #include "disas/disas.h"
28 #include "exec/cpu_ldst.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "sysemu/kvm.h"
33 #include "exec/semihost.h"
35 #include "trace-tcg.h"
/* Set non-zero to enable extra disassembly debug output. */
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: bits 31..26 of the instruction word. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
/*
 * Major opcode table: each constant is the opcode field placed in
 * bits 31..26, compared against MASK_OP_MAJOR(instruction).
 * Several names share an encoding (e.g. compact branches reuse the
 * legacy branch opcodes); the decoder disambiguates on other fields.
 */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
150 /* PC-relative address computation / loads */
151 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
152 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
154 /* Instructions determined by bits 19 and 20 */
155 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
156 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
157 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
159 /* Instructions determined by bits 16 ... 20 */
160 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
161 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
164 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
/* MIPS special opcodes: major opcode plus the function field (bits 5..0).
 * Outer parentheses are required: without them an expression like
 * `MASK_SPECIAL(op) == OPC_SLL` would bind `==` before `|`. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
172 OPC_SLL
= 0x00 | OPC_SPECIAL
,
173 /* NOP is SLL r0, r0, 0 */
174 /* SSNOP is SLL r0, r0, 1 */
175 /* EHB is SLL r0, r0, 3 */
176 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
177 OPC_ROTR
= OPC_SRL
| (1 << 21),
178 OPC_SRA
= 0x03 | OPC_SPECIAL
,
179 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
180 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
181 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
182 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
183 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
184 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
185 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
186 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
187 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
188 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
189 OPC_DROTR
= OPC_DSRL
| (1 << 21),
190 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
191 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
192 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
193 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
194 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
195 /* Multiplication / division */
196 OPC_MULT
= 0x18 | OPC_SPECIAL
,
197 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
198 OPC_DIV
= 0x1A | OPC_SPECIAL
,
199 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
200 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
201 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
202 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
203 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
205 /* 2 registers arithmetic / logic */
206 OPC_ADD
= 0x20 | OPC_SPECIAL
,
207 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
208 OPC_SUB
= 0x22 | OPC_SPECIAL
,
209 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
210 OPC_AND
= 0x24 | OPC_SPECIAL
,
211 OPC_OR
= 0x25 | OPC_SPECIAL
,
212 OPC_XOR
= 0x26 | OPC_SPECIAL
,
213 OPC_NOR
= 0x27 | OPC_SPECIAL
,
214 OPC_SLT
= 0x2A | OPC_SPECIAL
,
215 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
216 OPC_DADD
= 0x2C | OPC_SPECIAL
,
217 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
218 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
219 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
221 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
222 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
224 OPC_TGE
= 0x30 | OPC_SPECIAL
,
225 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
226 OPC_TLT
= 0x32 | OPC_SPECIAL
,
227 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
228 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
229 OPC_TNE
= 0x36 | OPC_SPECIAL
,
230 /* HI / LO registers load & stores */
231 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
232 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
233 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
234 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
235 /* Conditional moves */
236 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
237 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
239 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
240 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
242 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
245 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
246 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
247 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
248 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
249 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
251 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
252 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
253 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
254 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
257 /* R6 Multiply and Divide instructions have the same Opcode
258 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
259 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
262 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
263 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
264 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
265 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
266 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
267 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
268 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
269 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
271 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
272 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
273 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
274 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
275 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
276 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
277 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
278 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
280 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
281 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
282 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
283 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
284 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
286 OPC_LSA
= 0x05 | OPC_SPECIAL
,
287 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
290 /* Multiplication variants of the vr54xx. */
291 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
294 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
295 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
296 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
297 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
299 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
301 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
303 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
304 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
305 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
306 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
307 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
310 /* REGIMM (rt field) opcodes */
311 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
314 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
315 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
316 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
317 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
318 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
319 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
320 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
321 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
322 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
323 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
324 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
325 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
326 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
327 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
328 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
329 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
331 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
332 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
335 /* Special2 opcodes */
336 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
339 /* Multiply & xxx operations */
340 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
341 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
342 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
343 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
344 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
346 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
347 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
348 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
349 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
350 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
351 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
352 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
353 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
354 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
355 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
356 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
357 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
359 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
360 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
361 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
362 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
364 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
367 /* Special3 opcodes */
368 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
371 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
372 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
373 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
374 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
375 OPC_INS
= 0x04 | OPC_SPECIAL3
,
376 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
377 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
378 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
379 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
380 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
381 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
382 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
383 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
386 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
387 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
388 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
389 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
390 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
391 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
392 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
393 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
394 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
395 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
396 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
397 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
400 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
401 /* MIPS DSP Arithmetic */
402 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
403 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
404 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
405 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
406 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
407 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
408 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
409 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
410 /* MIPS DSP GPR-Based Shift Sub-class */
411 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
412 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
413 /* MIPS DSP Multiply Sub-class insns */
414 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
415 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
416 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
417 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
418 /* DSP Bit/Manipulation Sub-class */
419 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
420 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
421 /* MIPS DSP Append Sub-class */
422 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
423 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
424 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
425 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
426 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
429 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
430 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
431 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
432 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
433 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
434 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
438 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
441 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
442 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
443 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
444 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
445 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
446 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
450 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
453 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
454 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
455 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
456 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
457 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
460 /* MIPS DSP REGIMM opcodes */
462 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
463 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
466 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
469 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
470 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
471 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
472 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
475 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
477 /* MIPS DSP Arithmetic Sub-class */
478 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
484 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
492 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
494 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
496 /* MIPS DSP Multiply Sub-class insns */
497 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
502 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
505 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
506 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
521 /* MIPS DSP Multiply Sub-class insns */
522 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
523 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
524 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
525 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
528 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
544 /* DSP Bit/Manipulation Sub-class */
545 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
547 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
549 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
552 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
554 /* MIPS DSP Arithmetic Sub-class */
555 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
562 /* DSP Compare-Pick Sub-class */
563 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
582 /* MIPS DSP GPR-Based Shift Sub-class */
583 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
607 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
609 /* MIPS DSP Multiply Sub-class insns */
610 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
634 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
636 /* DSP Bit/Manipulation Sub-class */
637 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
640 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Append Sub-class */
643 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
644 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
645 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
648 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
650 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
651 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
662 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
663 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
664 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
665 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
666 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
667 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
670 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Arithmetic Sub-class */
673 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
690 /* DSP Bit/Manipulation Sub-class */
691 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
699 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
701 /* MIPS DSP Multiply Sub-class insns */
702 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
704 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
707 /* MIPS DSP Arithmetic Sub-class */
708 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
731 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
733 /* DSP Compare-Pick Sub-class */
734 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
753 /* MIPS DSP Arithmetic Sub-class */
754 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
764 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Append Sub-class */
767 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
768 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
769 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
770 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
773 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
775 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
776 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
796 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
799 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
801 /* DSP Bit/Manipulation Sub-class */
802 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
805 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
807 /* MIPS DSP Multiply Sub-class insns */
808 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
836 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP GPR-Based Shift Sub-class */
839 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
867 /* Coprocessor 0 (rs field) */
868 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
871 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
872 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
873 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
874 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
875 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
876 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
877 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
878 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
879 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
880 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
881 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
882 OPC_C0
= (0x10 << 21) | OPC_CP0
,
883 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
884 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
888 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
891 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
892 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
893 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
894 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
895 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
896 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
897 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
898 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
901 /* Coprocessor 0 (with rs == C0) */
902 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
905 OPC_TLBR
= 0x01 | OPC_C0
,
906 OPC_TLBWI
= 0x02 | OPC_C0
,
907 OPC_TLBINV
= 0x03 | OPC_C0
,
908 OPC_TLBINVF
= 0x04 | OPC_C0
,
909 OPC_TLBWR
= 0x06 | OPC_C0
,
910 OPC_TLBP
= 0x08 | OPC_C0
,
911 OPC_RFE
= 0x10 | OPC_C0
,
912 OPC_ERET
= 0x18 | OPC_C0
,
913 OPC_DERET
= 0x1F | OPC_C0
,
914 OPC_WAIT
= 0x20 | OPC_C0
,
917 /* Coprocessor 1 (rs field) */
918 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
920 /* Values for the fmt field in FP instructions */
922 /* 0 - 15 are reserved */
923 FMT_S
= 16, /* single fp */
924 FMT_D
= 17, /* double fp */
925 FMT_E
= 18, /* extended fp */
926 FMT_Q
= 19, /* quad fp */
927 FMT_W
= 20, /* 32-bit fixed */
928 FMT_L
= 21, /* 64-bit fixed */
929 FMT_PS
= 22, /* paired single fp */
930 /* 23 - 31 are reserved */
934 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
935 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
936 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
937 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
938 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
939 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
940 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
941 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
942 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
943 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
944 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
945 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
946 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
947 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
948 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
949 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
950 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
951 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
952 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
953 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
954 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
955 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
956 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
957 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
958 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
959 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
960 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
961 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
962 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
963 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
966 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
967 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
970 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
971 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
972 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
973 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
977 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
978 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
982 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
983 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
986 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
989 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
990 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
991 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
992 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
993 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
994 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
995 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
996 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
997 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
998 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
999 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1002 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1005 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1027 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1028 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1029 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1099 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1102 OPC_LWXC1
= 0x00 | OPC_CP3
,
1103 OPC_LDXC1
= 0x01 | OPC_CP3
,
1104 OPC_LUXC1
= 0x05 | OPC_CP3
,
1105 OPC_SWXC1
= 0x08 | OPC_CP3
,
1106 OPC_SDXC1
= 0x09 | OPC_CP3
,
1107 OPC_SUXC1
= 0x0D | OPC_CP3
,
1108 OPC_PREFX
= 0x0F | OPC_CP3
,
1109 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1110 OPC_MADD_S
= 0x20 | OPC_CP3
,
1111 OPC_MADD_D
= 0x21 | OPC_CP3
,
1112 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1113 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1114 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1115 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1116 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1117 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1118 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1119 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1120 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1121 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1125 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1127 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1128 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1129 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1130 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1131 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1132 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1133 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1134 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1135 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1136 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1137 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1138 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1139 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1140 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1141 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1142 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1143 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1144 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1145 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1146 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1147 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1149 /* MI10 instruction */
1150 OPC_LD_B
= (0x20) | OPC_MSA
,
1151 OPC_LD_H
= (0x21) | OPC_MSA
,
1152 OPC_LD_W
= (0x22) | OPC_MSA
,
1153 OPC_LD_D
= (0x23) | OPC_MSA
,
1154 OPC_ST_B
= (0x24) | OPC_MSA
,
1155 OPC_ST_H
= (0x25) | OPC_MSA
,
1156 OPC_ST_W
= (0x26) | OPC_MSA
,
1157 OPC_ST_D
= (0x27) | OPC_MSA
,
1161 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1162 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1163 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1164 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1165 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1166 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1167 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1168 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1169 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1170 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1171 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1172 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1173 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1175 /* I8 instruction */
1176 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1177 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1178 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1179 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1180 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1181 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1182 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1183 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1184 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1185 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1187 /* VEC/2R/2RF instruction */
1188 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1189 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1190 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1191 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1192 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1193 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1194 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1196 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1197 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1199 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1200 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1201 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1202 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1203 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1205 /* 2RF instruction df(bit 16) = _w, _d */
1206 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1207 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1208 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1209 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1210 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1211 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1212 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1213 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1214 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1215 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1216 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1217 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1218 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1219 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1220 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1221 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1223 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1224 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1225 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1226 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1227 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1228 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1229 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1230 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1231 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1232 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1233 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1234 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1235 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1236 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1237 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1238 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1239 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1240 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1241 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1242 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1243 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1244 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1245 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1246 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1247 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1248 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1249 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1250 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1251 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1252 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1253 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1254 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1255 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1256 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1257 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1258 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1259 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1260 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1261 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1262 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1263 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1264 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1265 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1266 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1267 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1268 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1269 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1270 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1271 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1272 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1273 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1274 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1275 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1276 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1277 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1278 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1279 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1280 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1281 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1282 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1283 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1284 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1285 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1286 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1288 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1289 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1290 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1291 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1293 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1295 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1296 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1297 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1299 /* 3RF instruction _df(bit 21) = _w, _d */
1300 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1303 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1304 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1305 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1306 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1307 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1308 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1309 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1310 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1311 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1312 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1313 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1316 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1317 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1318 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1319 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1320 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1321 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1322 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1323 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1327 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1332 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1338 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1340 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1342 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1343 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1344 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1345 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1346 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1347 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1348 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1349 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1350 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1351 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1352 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1353 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1354 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1357 /* global register indices */
1358 static TCGv_env cpu_env
;
1359 static TCGv cpu_gpr
[32], cpu_PC
;
1360 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1361 static TCGv cpu_dspctrl
, btarget
, bcond
;
1362 static TCGv_i32 hflags
;
1363 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1364 static TCGv_i64 fpu_f64
[32];
1365 static TCGv_i64 msa_wr_d
[64];
1367 #include "exec/gen-icount.h"
/*
 * Convenience wrappers for calling TCG helpers that take one trailing
 * immediate argument.  Each macro loads the immediate into a TCGv_i32
 * temporary, calls gen_helper_<name>(cpu_env, ..., tmp) and frees the
 * temporary.  Naming: <n>e<m>i = n TCG return values, m TCGv arguments
 * before the immediate.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1411 typedef struct DisasContext
{
1412 struct TranslationBlock
*tb
;
1413 target_ulong pc
, saved_pc
;
1415 int singlestep_enabled
;
1417 int32_t CP0_Config1
;
1418 /* Routine used to access memory */
1420 TCGMemOp default_tcg_memop_mask
;
1421 uint32_t hflags
, saved_hflags
;
1423 target_ulong btarget
;
1432 int CP0_LLAddr_shift
;
/* Translation-block exit states recorded in DisasContext::bstate. */
enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
/* ABI names of the 32 general-purpose registers, indexed by encoding. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI registers, one per DSP accumulator. */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO registers, one per DSP accumulator. */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the MSA wide registers, split into d0/d1 64-bit halves
 * (matches the msa_wr_d[64] layout). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Emit a disassembly trace message when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)
/* Log an invalid-opcode diagnostic (expects 'ctx' in the caller's scope). */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1504 /* General purpose registers moves. */
1505 static inline void gen_load_gpr (TCGv t
, int reg
)
1508 tcg_gen_movi_tl(t
, 0);
1510 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1513 static inline void gen_store_gpr (TCGv t
, int reg
)
1516 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1519 /* Moves to/from shadow registers. */
1520 static inline void gen_load_srsgpr (int from
, int to
)
1522 TCGv t0
= tcg_temp_new();
1525 tcg_gen_movi_tl(t0
, 0);
1527 TCGv_i32 t2
= tcg_temp_new_i32();
1528 TCGv_ptr addr
= tcg_temp_new_ptr();
1530 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1531 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1532 tcg_gen_andi_i32(t2
, t2
, 0xf);
1533 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1534 tcg_gen_ext_i32_ptr(addr
, t2
);
1535 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1537 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1538 tcg_temp_free_ptr(addr
);
1539 tcg_temp_free_i32(t2
);
1541 gen_store_gpr(t0
, to
);
1545 static inline void gen_store_srsgpr (int from
, int to
)
1548 TCGv t0
= tcg_temp_new();
1549 TCGv_i32 t2
= tcg_temp_new_i32();
1550 TCGv_ptr addr
= tcg_temp_new_ptr();
1552 gen_load_gpr(t0
, from
);
1553 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1554 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1555 tcg_gen_andi_i32(t2
, t2
, 0xf);
1556 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1557 tcg_gen_ext_i32_ptr(addr
, t2
);
1558 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1560 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1561 tcg_temp_free_ptr(addr
);
1562 tcg_temp_free_i32(t2
);
1568 static inline void gen_save_pc(target_ulong pc
)
1570 tcg_gen_movi_tl(cpu_PC
, pc
);
1573 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1575 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1576 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1577 gen_save_pc(ctx
->pc
);
1578 ctx
->saved_pc
= ctx
->pc
;
1580 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1581 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1582 ctx
->saved_hflags
= ctx
->hflags
;
1583 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1589 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1595 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1597 ctx
->saved_hflags
= ctx
->hflags
;
1598 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1604 ctx
->btarget
= env
->btarget
;
1609 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1611 TCGv_i32 texcp
= tcg_const_i32(excp
);
1612 TCGv_i32 terr
= tcg_const_i32(err
);
1613 save_cpu_state(ctx
, 1);
1614 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1615 tcg_temp_free_i32(terr
);
1616 tcg_temp_free_i32(texcp
);
1617 ctx
->bstate
= BS_EXCP
;
1620 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1622 gen_helper_0e0i(raise_exception
, excp
);
1625 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1627 generate_exception_err(ctx
, excp
, 0);
1630 /* Floating point register moves. */
1631 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1633 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1634 generate_exception(ctx
, EXCP_RI
);
1636 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1639 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1642 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1643 generate_exception(ctx
, EXCP_RI
);
1645 t64
= tcg_temp_new_i64();
1646 tcg_gen_extu_i32_i64(t64
, t
);
1647 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1648 tcg_temp_free_i64(t64
);
1651 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1653 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1654 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1656 gen_load_fpr32(ctx
, t
, reg
| 1);
1660 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1662 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1663 TCGv_i64 t64
= tcg_temp_new_i64();
1664 tcg_gen_extu_i32_i64(t64
, t
);
1665 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1666 tcg_temp_free_i64(t64
);
1668 gen_store_fpr32(ctx
, t
, reg
| 1);
1672 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1674 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1675 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1677 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1681 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1683 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1684 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1687 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1688 t0
= tcg_temp_new_i64();
1689 tcg_gen_shri_i64(t0
, t
, 32);
1690 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1691 tcg_temp_free_i64(t0
);
/* Map FP condition code 'cc' to its bit position in FCSR: cc0 is bit 23,
 * cc1..cc7 are bits 25..31 (bit 24 is the FS flag, hence the gap).
 * NOTE(review): body restored from upstream QEMU; the extracted text had
 * dropped it — confirm against the project history. */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1703 /* Addresses computation */
1704 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1706 tcg_gen_add_tl(ret
, arg0
, arg1
);
1708 #if defined(TARGET_MIPS64)
1709 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1710 tcg_gen_ext32s_i64(ret
, ret
);
1715 /* Addresses computation (translation time) */
1716 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1719 target_long sum
= base
+ offset
;
1721 #if defined(TARGET_MIPS64)
1722 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1729 /* Sign-extract the low 32-bits to a target_long. */
1730 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1732 #if defined(TARGET_MIPS64)
1733 tcg_gen_ext32s_i64(ret
, arg
);
1735 tcg_gen_extrl_i64_i32(ret
, arg
);
1739 /* Sign-extract the high 32-bits to a target_long. */
1740 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1742 #if defined(TARGET_MIPS64)
1743 tcg_gen_sari_i64(ret
, arg
, 32);
1745 tcg_gen_extrh_i64_i32(ret
, arg
);
1749 static inline void check_cp0_enabled(DisasContext
*ctx
)
1751 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1752 generate_exception_err(ctx
, EXCP_CpU
, 0);
1755 static inline void check_cp1_enabled(DisasContext
*ctx
)
1757 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1758 generate_exception_err(ctx
, EXCP_CpU
, 1);
1761 /* Verify that the processor is running with COP1X instructions enabled.
1762 This is associated with the nabla symbol in the MIPS32 and MIPS64
1765 static inline void check_cop1x(DisasContext
*ctx
)
1767 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1768 generate_exception_end(ctx
, EXCP_RI
);
1771 /* Verify that the processor is running with 64-bit floating-point
1772 operations enabled. */
1774 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1776 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1777 generate_exception_end(ctx
, EXCP_RI
);
1781 * Verify if floating point register is valid; an operation is not defined
1782 * if bit 0 of any register specification is set and the FR bit in the
1783 * Status register equals zero, since the register numbers specify an
1784 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1785 * in the Status register equals one, both even and odd register numbers
1786 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1788 * Multiple 64 bit wide registers can be checked by calling
1789 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1791 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1793 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1794 generate_exception_end(ctx
, EXCP_RI
);
1797 /* Verify that the processor is running with DSP instructions enabled.
1798 This is enabled by CP0 Status register MX(24) bit.
1801 static inline void check_dsp(DisasContext
*ctx
)
1803 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1804 if (ctx
->insn_flags
& ASE_DSP
) {
1805 generate_exception_end(ctx
, EXCP_DSPDIS
);
1807 generate_exception_end(ctx
, EXCP_RI
);
1812 static inline void check_dspr2(DisasContext
*ctx
)
1814 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1815 if (ctx
->insn_flags
& ASE_DSP
) {
1816 generate_exception_end(ctx
, EXCP_DSPDIS
);
1818 generate_exception_end(ctx
, EXCP_RI
);
1823 /* This code generates a "reserved instruction" exception if the
1824 CPU does not support the instruction set corresponding to flags. */
1825 static inline void check_insn(DisasContext
*ctx
, int flags
)
1827 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1828 generate_exception_end(ctx
, EXCP_RI
);
1832 /* This code generates a "reserved instruction" exception if the
1833 CPU has corresponding flag set which indicates that the instruction
1834 has been removed. */
1835 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1837 if (unlikely(ctx
->insn_flags
& flags
)) {
1838 generate_exception_end(ctx
, EXCP_RI
);
1842 /* This code generates a "reserved instruction" exception if the
1843 CPU does not support 64-bit paired-single (PS) floating point data type */
1844 static inline void check_ps(DisasContext
*ctx
)
1846 if (unlikely(!ctx
->ps
)) {
1847 generate_exception(ctx
, EXCP_RI
);
1849 check_cp1_64bitmode(ctx
);
1852 #ifdef TARGET_MIPS64
1853 /* This code generates a "reserved instruction" exception if 64-bit
1854 instructions are not enabled. */
1855 static inline void check_mips_64(DisasContext
*ctx
)
1857 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1858 generate_exception_end(ctx
, EXCP_RI
);
1862 #ifndef CONFIG_USER_ONLY
1863 static inline void check_mvh(DisasContext
*ctx
)
1865 if (unlikely(!ctx
->mvh
)) {
1866 generate_exception(ctx
, EXCP_RI
);
1871 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1872 calling interface for 32 and 64-bit FPRs. No sense in changing
1873 all callers for gen_load_fpr32 when we need the CTX parameter for
1875 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1876 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1877 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1878 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1879 int ft, int fs, int cc) \
1881 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1882 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1891 check_cp1_registers(ctx, fs | ft); \
1899 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1900 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1902 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1903 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1904 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1905 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1906 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1907 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1908 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1909 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1910 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1911 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1912 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1913 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1914 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1915 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1916 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1917 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1920 tcg_temp_free_i##bits (fp0); \
1921 tcg_temp_free_i##bits (fp1); \
1924 FOP_CONDS(, 0, d
, FMT_D
, 64)
1925 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1926 FOP_CONDS(, 0, s
, FMT_S
, 32)
1927 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1928 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1929 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1932 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1933 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1934 int ft, int fs, int fd) \
1936 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1937 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1938 if (ifmt == FMT_D) { \
1939 check_cp1_registers(ctx, fs | ft | fd); \
1941 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1942 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1945 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1948 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1951 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1954 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1957 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1960 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1963 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1966 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1969 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1972 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1975 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1978 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1981 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1984 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1987 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1990 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1993 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1996 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1999 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2002 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2005 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2008 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2014 tcg_temp_free_i ## bits (fp0); \
2015 tcg_temp_free_i ## bits (fp1); \
2018 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2019 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2021 #undef gen_ldcmp_fpr32
2022 #undef gen_ldcmp_fpr64
2024 /* load/store instructions. */
2025 #ifdef CONFIG_USER_ONLY
2026 #define OP_LD_ATOMIC(insn,fname) \
2027 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2029 TCGv t0 = tcg_temp_new(); \
2030 tcg_gen_mov_tl(t0, arg1); \
2031 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2032 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2033 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2034 tcg_temp_free(t0); \
2037 #define OP_LD_ATOMIC(insn,fname) \
2038 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2040 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2043 OP_LD_ATOMIC(ll
,ld32s
);
2044 #if defined(TARGET_MIPS64)
2045 OP_LD_ATOMIC(lld
,ld64
);
2049 #ifdef CONFIG_USER_ONLY
2050 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2051 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2053 TCGv t0 = tcg_temp_new(); \
2054 TCGLabel *l1 = gen_new_label(); \
2055 TCGLabel *l2 = gen_new_label(); \
2057 tcg_gen_andi_tl(t0, arg2, almask); \
2058 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2059 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2060 generate_exception(ctx, EXCP_AdES); \
2061 gen_set_label(l1); \
2062 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2063 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2064 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2065 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2066 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2067 generate_exception_end(ctx, EXCP_SC); \
2068 gen_set_label(l2); \
2069 tcg_gen_movi_tl(t0, 0); \
2070 gen_store_gpr(t0, rt); \
2071 tcg_temp_free(t0); \
2074 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2075 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2077 TCGv t0 = tcg_temp_new(); \
2078 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2079 gen_store_gpr(t0, rt); \
2080 tcg_temp_free(t0); \
2083 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2084 #if defined(TARGET_MIPS64)
2085 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2089 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2090 int base
, int16_t offset
)
2093 tcg_gen_movi_tl(addr
, offset
);
2094 } else if (offset
== 0) {
2095 gen_load_gpr(addr
, base
);
2097 tcg_gen_movi_tl(addr
, offset
);
2098 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2102 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2104 target_ulong pc
= ctx
->pc
;
2106 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2107 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2112 pc
&= ~(target_ulong
)3;
2117 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2118 int rt
, int base
, int16_t offset
)
2122 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2123 /* Loongson CPU uses a load to zero register for prefetch.
2124 We emulate it as a NOP. On other CPU we must perform the
2125 actual memory access. */
2129 t0
= tcg_temp_new();
2130 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2133 #if defined(TARGET_MIPS64)
2135 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2136 ctx
->default_tcg_memop_mask
);
2137 gen_store_gpr(t0
, rt
);
2140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2141 ctx
->default_tcg_memop_mask
);
2142 gen_store_gpr(t0
, rt
);
2146 op_ld_lld(t0
, t0
, ctx
);
2147 gen_store_gpr(t0
, rt
);
2150 t1
= tcg_temp_new();
2151 /* Do a byte access to possibly trigger a page
2152 fault with the unaligned address. */
2153 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2154 tcg_gen_andi_tl(t1
, t0
, 7);
2155 #ifndef TARGET_WORDS_BIGENDIAN
2156 tcg_gen_xori_tl(t1
, t1
, 7);
2158 tcg_gen_shli_tl(t1
, t1
, 3);
2159 tcg_gen_andi_tl(t0
, t0
, ~7);
2160 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2161 tcg_gen_shl_tl(t0
, t0
, t1
);
2162 t2
= tcg_const_tl(-1);
2163 tcg_gen_shl_tl(t2
, t2
, t1
);
2164 gen_load_gpr(t1
, rt
);
2165 tcg_gen_andc_tl(t1
, t1
, t2
);
2167 tcg_gen_or_tl(t0
, t0
, t1
);
2169 gen_store_gpr(t0
, rt
);
2172 t1
= tcg_temp_new();
2173 /* Do a byte access to possibly trigger a page
2174 fault with the unaligned address. */
2175 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2176 tcg_gen_andi_tl(t1
, t0
, 7);
2177 #ifdef TARGET_WORDS_BIGENDIAN
2178 tcg_gen_xori_tl(t1
, t1
, 7);
2180 tcg_gen_shli_tl(t1
, t1
, 3);
2181 tcg_gen_andi_tl(t0
, t0
, ~7);
2182 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2183 tcg_gen_shr_tl(t0
, t0
, t1
);
2184 tcg_gen_xori_tl(t1
, t1
, 63);
2185 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2186 tcg_gen_shl_tl(t2
, t2
, t1
);
2187 gen_load_gpr(t1
, rt
);
2188 tcg_gen_and_tl(t1
, t1
, t2
);
2190 tcg_gen_or_tl(t0
, t0
, t1
);
2192 gen_store_gpr(t0
, rt
);
2195 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2196 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2198 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2199 gen_store_gpr(t0
, rt
);
2203 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2204 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2206 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2207 gen_store_gpr(t0
, rt
);
2210 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2211 ctx
->default_tcg_memop_mask
);
2212 gen_store_gpr(t0
, rt
);
2215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2216 ctx
->default_tcg_memop_mask
);
2217 gen_store_gpr(t0
, rt
);
2220 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2221 ctx
->default_tcg_memop_mask
);
2222 gen_store_gpr(t0
, rt
);
2225 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2226 gen_store_gpr(t0
, rt
);
2229 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2230 gen_store_gpr(t0
, rt
);
2233 t1
= tcg_temp_new();
2234 /* Do a byte access to possibly trigger a page
2235 fault with the unaligned address. */
2236 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2237 tcg_gen_andi_tl(t1
, t0
, 3);
2238 #ifndef TARGET_WORDS_BIGENDIAN
2239 tcg_gen_xori_tl(t1
, t1
, 3);
2241 tcg_gen_shli_tl(t1
, t1
, 3);
2242 tcg_gen_andi_tl(t0
, t0
, ~3);
2243 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2244 tcg_gen_shl_tl(t0
, t0
, t1
);
2245 t2
= tcg_const_tl(-1);
2246 tcg_gen_shl_tl(t2
, t2
, t1
);
2247 gen_load_gpr(t1
, rt
);
2248 tcg_gen_andc_tl(t1
, t1
, t2
);
2250 tcg_gen_or_tl(t0
, t0
, t1
);
2252 tcg_gen_ext32s_tl(t0
, t0
);
2253 gen_store_gpr(t0
, rt
);
2256 t1
= tcg_temp_new();
2257 /* Do a byte access to possibly trigger a page
2258 fault with the unaligned address. */
2259 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2260 tcg_gen_andi_tl(t1
, t0
, 3);
2261 #ifdef TARGET_WORDS_BIGENDIAN
2262 tcg_gen_xori_tl(t1
, t1
, 3);
2264 tcg_gen_shli_tl(t1
, t1
, 3);
2265 tcg_gen_andi_tl(t0
, t0
, ~3);
2266 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2267 tcg_gen_shr_tl(t0
, t0
, t1
);
2268 tcg_gen_xori_tl(t1
, t1
, 31);
2269 t2
= tcg_const_tl(0xfffffffeull
);
2270 tcg_gen_shl_tl(t2
, t2
, t1
);
2271 gen_load_gpr(t1
, rt
);
2272 tcg_gen_and_tl(t1
, t1
, t2
);
2274 tcg_gen_or_tl(t0
, t0
, t1
);
2276 tcg_gen_ext32s_tl(t0
, t0
);
2277 gen_store_gpr(t0
, rt
);
2281 op_ld_ll(t0
, t0
, ctx
);
2282 gen_store_gpr(t0
, rt
);
2289 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2290 int base
, int16_t offset
)
2292 TCGv t0
= tcg_temp_new();
2293 TCGv t1
= tcg_temp_new();
2295 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2296 gen_load_gpr(t1
, rt
);
2298 #if defined(TARGET_MIPS64)
2300 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2301 ctx
->default_tcg_memop_mask
);
2304 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2307 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2311 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2312 ctx
->default_tcg_memop_mask
);
2315 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2316 ctx
->default_tcg_memop_mask
);
2319 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2322 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2325 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2333 /* Store conditional */
2334 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2335 int base
, int16_t offset
)
2339 #ifdef CONFIG_USER_ONLY
2340 t0
= tcg_temp_local_new();
2341 t1
= tcg_temp_local_new();
2343 t0
= tcg_temp_new();
2344 t1
= tcg_temp_new();
2346 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2347 gen_load_gpr(t1
, rt
);
2349 #if defined(TARGET_MIPS64)
2352 op_st_scd(t1
, t0
, rt
, ctx
);
2357 op_st_sc(t1
, t0
, rt
, ctx
);
2364 /* Load and store */
2365 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2366 int base
, int16_t offset
)
2368 TCGv t0
= tcg_temp_new();
2370 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2371 /* Don't do NOP if destination is zero: we must perform the actual
2376 TCGv_i32 fp0
= tcg_temp_new_i32();
2377 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2378 ctx
->default_tcg_memop_mask
);
2379 gen_store_fpr32(ctx
, fp0
, ft
);
2380 tcg_temp_free_i32(fp0
);
2385 TCGv_i32 fp0
= tcg_temp_new_i32();
2386 gen_load_fpr32(ctx
, fp0
, ft
);
2387 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2388 ctx
->default_tcg_memop_mask
);
2389 tcg_temp_free_i32(fp0
);
2394 TCGv_i64 fp0
= tcg_temp_new_i64();
2395 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2396 ctx
->default_tcg_memop_mask
);
2397 gen_store_fpr64(ctx
, fp0
, ft
);
2398 tcg_temp_free_i64(fp0
);
2403 TCGv_i64 fp0
= tcg_temp_new_i64();
2404 gen_load_fpr64(ctx
, fp0
, ft
);
2405 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2406 ctx
->default_tcg_memop_mask
);
2407 tcg_temp_free_i64(fp0
);
2411 MIPS_INVAL("flt_ldst");
2412 generate_exception_end(ctx
, EXCP_RI
);
2419 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2420 int rs
, int16_t imm
)
2422 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2423 check_cp1_enabled(ctx
);
2427 check_insn(ctx
, ISA_MIPS2
);
2430 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2433 generate_exception_err(ctx
, EXCP_CpU
, 1);
2437 /* Arithmetic with immediate operand */
2438 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2439 int rt
, int rs
, int16_t imm
)
2441 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2443 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2444 /* If no destination, treat it as a NOP.
2445 For addi, we must generate the overflow exception when needed. */
2451 TCGv t0
= tcg_temp_local_new();
2452 TCGv t1
= tcg_temp_new();
2453 TCGv t2
= tcg_temp_new();
2454 TCGLabel
*l1
= gen_new_label();
2456 gen_load_gpr(t1
, rs
);
2457 tcg_gen_addi_tl(t0
, t1
, uimm
);
2458 tcg_gen_ext32s_tl(t0
, t0
);
2460 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2461 tcg_gen_xori_tl(t2
, t0
, uimm
);
2462 tcg_gen_and_tl(t1
, t1
, t2
);
2464 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2466 /* operands of same sign, result different sign */
2467 generate_exception(ctx
, EXCP_OVERFLOW
);
2469 tcg_gen_ext32s_tl(t0
, t0
);
2470 gen_store_gpr(t0
, rt
);
2476 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2477 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2479 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2482 #if defined(TARGET_MIPS64)
2485 TCGv t0
= tcg_temp_local_new();
2486 TCGv t1
= tcg_temp_new();
2487 TCGv t2
= tcg_temp_new();
2488 TCGLabel
*l1
= gen_new_label();
2490 gen_load_gpr(t1
, rs
);
2491 tcg_gen_addi_tl(t0
, t1
, uimm
);
2493 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2494 tcg_gen_xori_tl(t2
, t0
, uimm
);
2495 tcg_gen_and_tl(t1
, t1
, t2
);
2497 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2499 /* operands of same sign, result different sign */
2500 generate_exception(ctx
, EXCP_OVERFLOW
);
2502 gen_store_gpr(t0
, rt
);
2508 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2510 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2517 /* Logic with immediate operand */
2518 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2519 int rt
, int rs
, int16_t imm
)
2524 /* If no destination, treat it as a NOP. */
2527 uimm
= (uint16_t)imm
;
2530 if (likely(rs
!= 0))
2531 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2533 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2537 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2539 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2542 if (likely(rs
!= 0))
2543 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2545 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2548 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2550 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2551 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2553 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2562 /* Set on less than with immediate operand */
2563 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2564 int rt
, int rs
, int16_t imm
)
2566 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2570 /* If no destination, treat it as a NOP. */
2573 t0
= tcg_temp_new();
2574 gen_load_gpr(t0
, rs
);
2577 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2580 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2586 /* Shifts with immediate operand */
2587 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2588 int rt
, int rs
, int16_t imm
)
2590 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2594 /* If no destination, treat it as a NOP. */
2598 t0
= tcg_temp_new();
2599 gen_load_gpr(t0
, rs
);
2602 tcg_gen_shli_tl(t0
, t0
, uimm
);
2603 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2606 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2610 tcg_gen_ext32u_tl(t0
, t0
);
2611 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2613 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2618 TCGv_i32 t1
= tcg_temp_new_i32();
2620 tcg_gen_trunc_tl_i32(t1
, t0
);
2621 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2622 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2623 tcg_temp_free_i32(t1
);
2625 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2628 #if defined(TARGET_MIPS64)
2630 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2633 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2636 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2640 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2642 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2646 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2649 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2652 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2655 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2663 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2664 int rd
, int rs
, int rt
)
2666 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2667 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2668 /* If no destination, treat it as a NOP.
2669 For add & sub, we must generate the overflow exception when needed. */
2676 TCGv t0
= tcg_temp_local_new();
2677 TCGv t1
= tcg_temp_new();
2678 TCGv t2
= tcg_temp_new();
2679 TCGLabel
*l1
= gen_new_label();
2681 gen_load_gpr(t1
, rs
);
2682 gen_load_gpr(t2
, rt
);
2683 tcg_gen_add_tl(t0
, t1
, t2
);
2684 tcg_gen_ext32s_tl(t0
, t0
);
2685 tcg_gen_xor_tl(t1
, t1
, t2
);
2686 tcg_gen_xor_tl(t2
, t0
, t2
);
2687 tcg_gen_andc_tl(t1
, t2
, t1
);
2689 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2691 /* operands of same sign, result different sign */
2692 generate_exception(ctx
, EXCP_OVERFLOW
);
2694 gen_store_gpr(t0
, rd
);
2699 if (rs
!= 0 && rt
!= 0) {
2700 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2701 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2702 } else if (rs
== 0 && rt
!= 0) {
2703 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2704 } else if (rs
!= 0 && rt
== 0) {
2705 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2707 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2712 TCGv t0
= tcg_temp_local_new();
2713 TCGv t1
= tcg_temp_new();
2714 TCGv t2
= tcg_temp_new();
2715 TCGLabel
*l1
= gen_new_label();
2717 gen_load_gpr(t1
, rs
);
2718 gen_load_gpr(t2
, rt
);
2719 tcg_gen_sub_tl(t0
, t1
, t2
);
2720 tcg_gen_ext32s_tl(t0
, t0
);
2721 tcg_gen_xor_tl(t2
, t1
, t2
);
2722 tcg_gen_xor_tl(t1
, t0
, t1
);
2723 tcg_gen_and_tl(t1
, t1
, t2
);
2725 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2727 /* operands of different sign, first operand and result different sign */
2728 generate_exception(ctx
, EXCP_OVERFLOW
);
2730 gen_store_gpr(t0
, rd
);
2735 if (rs
!= 0 && rt
!= 0) {
2736 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2737 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2738 } else if (rs
== 0 && rt
!= 0) {
2739 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2740 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2741 } else if (rs
!= 0 && rt
== 0) {
2742 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2744 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2747 #if defined(TARGET_MIPS64)
2750 TCGv t0
= tcg_temp_local_new();
2751 TCGv t1
= tcg_temp_new();
2752 TCGv t2
= tcg_temp_new();
2753 TCGLabel
*l1
= gen_new_label();
2755 gen_load_gpr(t1
, rs
);
2756 gen_load_gpr(t2
, rt
);
2757 tcg_gen_add_tl(t0
, t1
, t2
);
2758 tcg_gen_xor_tl(t1
, t1
, t2
);
2759 tcg_gen_xor_tl(t2
, t0
, t2
);
2760 tcg_gen_andc_tl(t1
, t2
, t1
);
2762 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2764 /* operands of same sign, result different sign */
2765 generate_exception(ctx
, EXCP_OVERFLOW
);
2767 gen_store_gpr(t0
, rd
);
2772 if (rs
!= 0 && rt
!= 0) {
2773 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2774 } else if (rs
== 0 && rt
!= 0) {
2775 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2776 } else if (rs
!= 0 && rt
== 0) {
2777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2779 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2784 TCGv t0
= tcg_temp_local_new();
2785 TCGv t1
= tcg_temp_new();
2786 TCGv t2
= tcg_temp_new();
2787 TCGLabel
*l1
= gen_new_label();
2789 gen_load_gpr(t1
, rs
);
2790 gen_load_gpr(t2
, rt
);
2791 tcg_gen_sub_tl(t0
, t1
, t2
);
2792 tcg_gen_xor_tl(t2
, t1
, t2
);
2793 tcg_gen_xor_tl(t1
, t0
, t1
);
2794 tcg_gen_and_tl(t1
, t1
, t2
);
2796 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2798 /* operands of different sign, first operand and result different sign */
2799 generate_exception(ctx
, EXCP_OVERFLOW
);
2801 gen_store_gpr(t0
, rd
);
2806 if (rs
!= 0 && rt
!= 0) {
2807 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2808 } else if (rs
== 0 && rt
!= 0) {
2809 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2810 } else if (rs
!= 0 && rt
== 0) {
2811 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2813 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2818 if (likely(rs
!= 0 && rt
!= 0)) {
2819 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2820 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2822 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2828 /* Conditional move */
2829 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2830 int rd
, int rs
, int rt
)
2835 /* If no destination, treat it as a NOP. */
2839 t0
= tcg_temp_new();
2840 gen_load_gpr(t0
, rt
);
2841 t1
= tcg_const_tl(0);
2842 t2
= tcg_temp_new();
2843 gen_load_gpr(t2
, rs
);
2846 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2849 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2852 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2855 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2864 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2865 int rd
, int rs
, int rt
)
2868 /* If no destination, treat it as a NOP. */
2874 if (likely(rs
!= 0 && rt
!= 0)) {
2875 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2877 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2881 if (rs
!= 0 && rt
!= 0) {
2882 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2883 } else if (rs
== 0 && rt
!= 0) {
2884 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2885 } else if (rs
!= 0 && rt
== 0) {
2886 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2888 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2892 if (likely(rs
!= 0 && rt
!= 0)) {
2893 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2894 } else if (rs
== 0 && rt
!= 0) {
2895 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2896 } else if (rs
!= 0 && rt
== 0) {
2897 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2899 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2903 if (likely(rs
!= 0 && rt
!= 0)) {
2904 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2905 } else if (rs
== 0 && rt
!= 0) {
2906 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2907 } else if (rs
!= 0 && rt
== 0) {
2908 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2910 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2916 /* Set on lower than */
2917 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2918 int rd
, int rs
, int rt
)
2923 /* If no destination, treat it as a NOP. */
2927 t0
= tcg_temp_new();
2928 t1
= tcg_temp_new();
2929 gen_load_gpr(t0
, rs
);
2930 gen_load_gpr(t1
, rt
);
2933 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2936 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2944 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2945 int rd
, int rs
, int rt
)
2950 /* If no destination, treat it as a NOP.
2951 For add & sub, we must generate the overflow exception when needed. */
2955 t0
= tcg_temp_new();
2956 t1
= tcg_temp_new();
2957 gen_load_gpr(t0
, rs
);
2958 gen_load_gpr(t1
, rt
);
2961 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2962 tcg_gen_shl_tl(t0
, t1
, t0
);
2963 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2966 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2967 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2970 tcg_gen_ext32u_tl(t1
, t1
);
2971 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2972 tcg_gen_shr_tl(t0
, t1
, t0
);
2973 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2977 TCGv_i32 t2
= tcg_temp_new_i32();
2978 TCGv_i32 t3
= tcg_temp_new_i32();
2980 tcg_gen_trunc_tl_i32(t2
, t0
);
2981 tcg_gen_trunc_tl_i32(t3
, t1
);
2982 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2983 tcg_gen_rotr_i32(t2
, t3
, t2
);
2984 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2985 tcg_temp_free_i32(t2
);
2986 tcg_temp_free_i32(t3
);
2989 #if defined(TARGET_MIPS64)
2991 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2992 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2995 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2996 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2999 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3000 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3003 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3004 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3012 /* Arithmetic on HI/LO registers */
3013 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3015 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3026 #if defined(TARGET_MIPS64)
3028 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3032 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3036 #if defined(TARGET_MIPS64)
3038 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3042 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3047 #if defined(TARGET_MIPS64)
3049 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3053 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3056 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3061 #if defined(TARGET_MIPS64)
3063 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3067 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3070 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3076 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3079 TCGv t0
= tcg_const_tl(addr
);
3080 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3081 gen_store_gpr(t0
, reg
);
3085 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3091 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3094 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3095 addr
= addr_add(ctx
, pc
, offset
);
3096 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3100 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3101 addr
= addr_add(ctx
, pc
, offset
);
3102 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3104 #if defined(TARGET_MIPS64)
3107 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3108 addr
= addr_add(ctx
, pc
, offset
);
3109 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3113 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3116 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3117 addr
= addr_add(ctx
, pc
, offset
);
3118 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3123 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3124 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3125 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3128 #if defined(TARGET_MIPS64)
3129 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3130 case R6_OPC_LDPC
+ (1 << 16):
3131 case R6_OPC_LDPC
+ (2 << 16):
3132 case R6_OPC_LDPC
+ (3 << 16):
3134 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3135 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3136 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3140 MIPS_INVAL("OPC_PCREL");
3141 generate_exception_end(ctx
, EXCP_RI
);
3148 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3157 t0
= tcg_temp_new();
3158 t1
= tcg_temp_new();
3160 gen_load_gpr(t0
, rs
);
3161 gen_load_gpr(t1
, rt
);
3166 TCGv t2
= tcg_temp_new();
3167 TCGv t3
= tcg_temp_new();
3168 tcg_gen_ext32s_tl(t0
, t0
);
3169 tcg_gen_ext32s_tl(t1
, t1
);
3170 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3171 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3172 tcg_gen_and_tl(t2
, t2
, t3
);
3173 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3174 tcg_gen_or_tl(t2
, t2
, t3
);
3175 tcg_gen_movi_tl(t3
, 0);
3176 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3177 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3178 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3185 TCGv t2
= tcg_temp_new();
3186 TCGv t3
= tcg_temp_new();
3187 tcg_gen_ext32s_tl(t0
, t0
);
3188 tcg_gen_ext32s_tl(t1
, t1
);
3189 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3190 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3191 tcg_gen_and_tl(t2
, t2
, t3
);
3192 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3193 tcg_gen_or_tl(t2
, t2
, t3
);
3194 tcg_gen_movi_tl(t3
, 0);
3195 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3196 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3197 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3204 TCGv t2
= tcg_const_tl(0);
3205 TCGv t3
= tcg_const_tl(1);
3206 tcg_gen_ext32u_tl(t0
, t0
);
3207 tcg_gen_ext32u_tl(t1
, t1
);
3208 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3209 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3210 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3217 TCGv t2
= tcg_const_tl(0);
3218 TCGv t3
= tcg_const_tl(1);
3219 tcg_gen_ext32u_tl(t0
, t0
);
3220 tcg_gen_ext32u_tl(t1
, t1
);
3221 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3222 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3223 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3230 TCGv_i32 t2
= tcg_temp_new_i32();
3231 TCGv_i32 t3
= tcg_temp_new_i32();
3232 tcg_gen_trunc_tl_i32(t2
, t0
);
3233 tcg_gen_trunc_tl_i32(t3
, t1
);
3234 tcg_gen_mul_i32(t2
, t2
, t3
);
3235 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3236 tcg_temp_free_i32(t2
);
3237 tcg_temp_free_i32(t3
);
3242 TCGv_i32 t2
= tcg_temp_new_i32();
3243 TCGv_i32 t3
= tcg_temp_new_i32();
3244 tcg_gen_trunc_tl_i32(t2
, t0
);
3245 tcg_gen_trunc_tl_i32(t3
, t1
);
3246 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3247 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3248 tcg_temp_free_i32(t2
);
3249 tcg_temp_free_i32(t3
);
3254 TCGv_i32 t2
= tcg_temp_new_i32();
3255 TCGv_i32 t3
= tcg_temp_new_i32();
3256 tcg_gen_trunc_tl_i32(t2
, t0
);
3257 tcg_gen_trunc_tl_i32(t3
, t1
);
3258 tcg_gen_mul_i32(t2
, t2
, t3
);
3259 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3260 tcg_temp_free_i32(t2
);
3261 tcg_temp_free_i32(t3
);
3266 TCGv_i32 t2
= tcg_temp_new_i32();
3267 TCGv_i32 t3
= tcg_temp_new_i32();
3268 tcg_gen_trunc_tl_i32(t2
, t0
);
3269 tcg_gen_trunc_tl_i32(t3
, t1
);
3270 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3271 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3272 tcg_temp_free_i32(t2
);
3273 tcg_temp_free_i32(t3
);
3276 #if defined(TARGET_MIPS64)
3279 TCGv t2
= tcg_temp_new();
3280 TCGv t3
= tcg_temp_new();
3281 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3282 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3283 tcg_gen_and_tl(t2
, t2
, t3
);
3284 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3285 tcg_gen_or_tl(t2
, t2
, t3
);
3286 tcg_gen_movi_tl(t3
, 0);
3287 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3288 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3295 TCGv t2
= tcg_temp_new();
3296 TCGv t3
= tcg_temp_new();
3297 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3298 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3299 tcg_gen_and_tl(t2
, t2
, t3
);
3300 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3301 tcg_gen_or_tl(t2
, t2
, t3
);
3302 tcg_gen_movi_tl(t3
, 0);
3303 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3304 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3311 TCGv t2
= tcg_const_tl(0);
3312 TCGv t3
= tcg_const_tl(1);
3313 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3314 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3321 TCGv t2
= tcg_const_tl(0);
3322 TCGv t3
= tcg_const_tl(1);
3323 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3324 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3330 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3334 TCGv t2
= tcg_temp_new();
3335 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3340 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3344 TCGv t2
= tcg_temp_new();
3345 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3351 MIPS_INVAL("r6 mul/div");
3352 generate_exception_end(ctx
, EXCP_RI
);
3360 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3361 int acc
, int rs
, int rt
)
3365 t0
= tcg_temp_new();
3366 t1
= tcg_temp_new();
3368 gen_load_gpr(t0
, rs
);
3369 gen_load_gpr(t1
, rt
);
3378 TCGv t2
= tcg_temp_new();
3379 TCGv t3
= tcg_temp_new();
3380 tcg_gen_ext32s_tl(t0
, t0
);
3381 tcg_gen_ext32s_tl(t1
, t1
);
3382 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3383 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3384 tcg_gen_and_tl(t2
, t2
, t3
);
3385 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3386 tcg_gen_or_tl(t2
, t2
, t3
);
3387 tcg_gen_movi_tl(t3
, 0);
3388 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3389 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3390 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3391 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3392 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3399 TCGv t2
= tcg_const_tl(0);
3400 TCGv t3
= tcg_const_tl(1);
3401 tcg_gen_ext32u_tl(t0
, t0
);
3402 tcg_gen_ext32u_tl(t1
, t1
);
3403 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3404 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3405 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3406 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3407 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3414 TCGv_i32 t2
= tcg_temp_new_i32();
3415 TCGv_i32 t3
= tcg_temp_new_i32();
3416 tcg_gen_trunc_tl_i32(t2
, t0
);
3417 tcg_gen_trunc_tl_i32(t3
, t1
);
3418 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3419 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3420 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3421 tcg_temp_free_i32(t2
);
3422 tcg_temp_free_i32(t3
);
3427 TCGv_i32 t2
= tcg_temp_new_i32();
3428 TCGv_i32 t3
= tcg_temp_new_i32();
3429 tcg_gen_trunc_tl_i32(t2
, t0
);
3430 tcg_gen_trunc_tl_i32(t3
, t1
);
3431 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3432 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3433 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3434 tcg_temp_free_i32(t2
);
3435 tcg_temp_free_i32(t3
);
3438 #if defined(TARGET_MIPS64)
3441 TCGv t2
= tcg_temp_new();
3442 TCGv t3
= tcg_temp_new();
3443 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3444 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3445 tcg_gen_and_tl(t2
, t2
, t3
);
3446 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3447 tcg_gen_or_tl(t2
, t2
, t3
);
3448 tcg_gen_movi_tl(t3
, 0);
3449 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3450 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3451 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3458 TCGv t2
= tcg_const_tl(0);
3459 TCGv t3
= tcg_const_tl(1);
3460 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3461 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3462 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3468 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3471 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3476 TCGv_i64 t2
= tcg_temp_new_i64();
3477 TCGv_i64 t3
= tcg_temp_new_i64();
3479 tcg_gen_ext_tl_i64(t2
, t0
);
3480 tcg_gen_ext_tl_i64(t3
, t1
);
3481 tcg_gen_mul_i64(t2
, t2
, t3
);
3482 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3483 tcg_gen_add_i64(t2
, t2
, t3
);
3484 tcg_temp_free_i64(t3
);
3485 gen_move_low32(cpu_LO
[acc
], t2
);
3486 gen_move_high32(cpu_HI
[acc
], t2
);
3487 tcg_temp_free_i64(t2
);
3492 TCGv_i64 t2
= tcg_temp_new_i64();
3493 TCGv_i64 t3
= tcg_temp_new_i64();
3495 tcg_gen_ext32u_tl(t0
, t0
);
3496 tcg_gen_ext32u_tl(t1
, t1
);
3497 tcg_gen_extu_tl_i64(t2
, t0
);
3498 tcg_gen_extu_tl_i64(t3
, t1
);
3499 tcg_gen_mul_i64(t2
, t2
, t3
);
3500 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3501 tcg_gen_add_i64(t2
, t2
, t3
);
3502 tcg_temp_free_i64(t3
);
3503 gen_move_low32(cpu_LO
[acc
], t2
);
3504 gen_move_high32(cpu_HI
[acc
], t2
);
3505 tcg_temp_free_i64(t2
);
3510 TCGv_i64 t2
= tcg_temp_new_i64();
3511 TCGv_i64 t3
= tcg_temp_new_i64();
3513 tcg_gen_ext_tl_i64(t2
, t0
);
3514 tcg_gen_ext_tl_i64(t3
, t1
);
3515 tcg_gen_mul_i64(t2
, t2
, t3
);
3516 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3517 tcg_gen_sub_i64(t2
, t3
, t2
);
3518 tcg_temp_free_i64(t3
);
3519 gen_move_low32(cpu_LO
[acc
], t2
);
3520 gen_move_high32(cpu_HI
[acc
], t2
);
3521 tcg_temp_free_i64(t2
);
3526 TCGv_i64 t2
= tcg_temp_new_i64();
3527 TCGv_i64 t3
= tcg_temp_new_i64();
3529 tcg_gen_ext32u_tl(t0
, t0
);
3530 tcg_gen_ext32u_tl(t1
, t1
);
3531 tcg_gen_extu_tl_i64(t2
, t0
);
3532 tcg_gen_extu_tl_i64(t3
, t1
);
3533 tcg_gen_mul_i64(t2
, t2
, t3
);
3534 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3535 tcg_gen_sub_i64(t2
, t3
, t2
);
3536 tcg_temp_free_i64(t3
);
3537 gen_move_low32(cpu_LO
[acc
], t2
);
3538 gen_move_high32(cpu_HI
[acc
], t2
);
3539 tcg_temp_free_i64(t2
);
3543 MIPS_INVAL("mul/div");
3544 generate_exception_end(ctx
, EXCP_RI
);
3552 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3553 int rd
, int rs
, int rt
)
3555 TCGv t0
= tcg_temp_new();
3556 TCGv t1
= tcg_temp_new();
3558 gen_load_gpr(t0
, rs
);
3559 gen_load_gpr(t1
, rt
);
3562 case OPC_VR54XX_MULS
:
3563 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3565 case OPC_VR54XX_MULSU
:
3566 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3568 case OPC_VR54XX_MACC
:
3569 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3571 case OPC_VR54XX_MACCU
:
3572 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3574 case OPC_VR54XX_MSAC
:
3575 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3577 case OPC_VR54XX_MSACU
:
3578 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3580 case OPC_VR54XX_MULHI
:
3581 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3583 case OPC_VR54XX_MULHIU
:
3584 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3586 case OPC_VR54XX_MULSHI
:
3587 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3589 case OPC_VR54XX_MULSHIU
:
3590 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3592 case OPC_VR54XX_MACCHI
:
3593 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3595 case OPC_VR54XX_MACCHIU
:
3596 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3598 case OPC_VR54XX_MSACHI
:
3599 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3601 case OPC_VR54XX_MSACHIU
:
3602 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3605 MIPS_INVAL("mul vr54xx");
3606 generate_exception_end(ctx
, EXCP_RI
);
3609 gen_store_gpr(t0
, rd
);
3616 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3625 t0
= tcg_temp_new();
3626 gen_load_gpr(t0
, rs
);
3630 gen_helper_clo(cpu_gpr
[rd
], t0
);
3634 gen_helper_clz(cpu_gpr
[rd
], t0
);
3636 #if defined(TARGET_MIPS64)
3639 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3643 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3650 /* Godson integer instructions */
3651 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3652 int rd
, int rs
, int rt
)
3664 case OPC_MULTU_G_2E
:
3665 case OPC_MULTU_G_2F
:
3666 #if defined(TARGET_MIPS64)
3667 case OPC_DMULT_G_2E
:
3668 case OPC_DMULT_G_2F
:
3669 case OPC_DMULTU_G_2E
:
3670 case OPC_DMULTU_G_2F
:
3672 t0
= tcg_temp_new();
3673 t1
= tcg_temp_new();
3676 t0
= tcg_temp_local_new();
3677 t1
= tcg_temp_local_new();
3681 gen_load_gpr(t0
, rs
);
3682 gen_load_gpr(t1
, rt
);
3687 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3688 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3690 case OPC_MULTU_G_2E
:
3691 case OPC_MULTU_G_2F
:
3692 tcg_gen_ext32u_tl(t0
, t0
);
3693 tcg_gen_ext32u_tl(t1
, t1
);
3694 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3695 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3700 TCGLabel
*l1
= gen_new_label();
3701 TCGLabel
*l2
= gen_new_label();
3702 TCGLabel
*l3
= gen_new_label();
3703 tcg_gen_ext32s_tl(t0
, t0
);
3704 tcg_gen_ext32s_tl(t1
, t1
);
3705 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3706 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3709 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3710 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3711 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3714 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3715 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3722 TCGLabel
*l1
= gen_new_label();
3723 TCGLabel
*l2
= gen_new_label();
3724 tcg_gen_ext32u_tl(t0
, t0
);
3725 tcg_gen_ext32u_tl(t1
, t1
);
3726 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3727 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3730 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3731 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3738 TCGLabel
*l1
= gen_new_label();
3739 TCGLabel
*l2
= gen_new_label();
3740 TCGLabel
*l3
= gen_new_label();
3741 tcg_gen_ext32u_tl(t0
, t0
);
3742 tcg_gen_ext32u_tl(t1
, t1
);
3743 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3744 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3745 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3747 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3750 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3751 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3758 TCGLabel
*l1
= gen_new_label();
3759 TCGLabel
*l2
= gen_new_label();
3760 tcg_gen_ext32u_tl(t0
, t0
);
3761 tcg_gen_ext32u_tl(t1
, t1
);
3762 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3763 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3766 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3767 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3771 #if defined(TARGET_MIPS64)
3772 case OPC_DMULT_G_2E
:
3773 case OPC_DMULT_G_2F
:
3774 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3776 case OPC_DMULTU_G_2E
:
3777 case OPC_DMULTU_G_2F
:
3778 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3783 TCGLabel
*l1
= gen_new_label();
3784 TCGLabel
*l2
= gen_new_label();
3785 TCGLabel
*l3
= gen_new_label();
3786 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3787 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3790 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3791 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3792 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3795 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3799 case OPC_DDIVU_G_2E
:
3800 case OPC_DDIVU_G_2F
:
3802 TCGLabel
*l1
= gen_new_label();
3803 TCGLabel
*l2
= gen_new_label();
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3805 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3808 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3815 TCGLabel
*l1
= gen_new_label();
3816 TCGLabel
*l2
= gen_new_label();
3817 TCGLabel
*l3
= gen_new_label();
3818 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3819 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3820 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3822 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3825 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3829 case OPC_DMODU_G_2E
:
3830 case OPC_DMODU_G_2F
:
3832 TCGLabel
*l1
= gen_new_label();
3833 TCGLabel
*l2
= gen_new_label();
3834 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3835 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3838 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3849 /* Loongson multimedia instructions */
3850 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3852 uint32_t opc
, shift_max
;
3855 opc
= MASK_LMI(ctx
->opcode
);
3861 t0
= tcg_temp_local_new_i64();
3862 t1
= tcg_temp_local_new_i64();
3865 t0
= tcg_temp_new_i64();
3866 t1
= tcg_temp_new_i64();
3870 gen_load_fpr64(ctx
, t0
, rs
);
3871 gen_load_fpr64(ctx
, t1
, rt
);
3873 #define LMI_HELPER(UP, LO) \
3874 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3875 #define LMI_HELPER_1(UP, LO) \
3876 case OPC_##UP: gen_helper_##LO(t0, t0); break
3877 #define LMI_DIRECT(UP, LO, OP) \
3878 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3881 LMI_HELPER(PADDSH
, paddsh
);
3882 LMI_HELPER(PADDUSH
, paddush
);
3883 LMI_HELPER(PADDH
, paddh
);
3884 LMI_HELPER(PADDW
, paddw
);
3885 LMI_HELPER(PADDSB
, paddsb
);
3886 LMI_HELPER(PADDUSB
, paddusb
);
3887 LMI_HELPER(PADDB
, paddb
);
3889 LMI_HELPER(PSUBSH
, psubsh
);
3890 LMI_HELPER(PSUBUSH
, psubush
);
3891 LMI_HELPER(PSUBH
, psubh
);
3892 LMI_HELPER(PSUBW
, psubw
);
3893 LMI_HELPER(PSUBSB
, psubsb
);
3894 LMI_HELPER(PSUBUSB
, psubusb
);
3895 LMI_HELPER(PSUBB
, psubb
);
3897 LMI_HELPER(PSHUFH
, pshufh
);
3898 LMI_HELPER(PACKSSWH
, packsswh
);
3899 LMI_HELPER(PACKSSHB
, packsshb
);
3900 LMI_HELPER(PACKUSHB
, packushb
);
3902 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3903 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3904 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3905 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3906 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3907 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3909 LMI_HELPER(PAVGH
, pavgh
);
3910 LMI_HELPER(PAVGB
, pavgb
);
3911 LMI_HELPER(PMAXSH
, pmaxsh
);
3912 LMI_HELPER(PMINSH
, pminsh
);
3913 LMI_HELPER(PMAXUB
, pmaxub
);
3914 LMI_HELPER(PMINUB
, pminub
);
3916 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3917 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3918 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3919 LMI_HELPER(PCMPGTH
, pcmpgth
);
3920 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3921 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3923 LMI_HELPER(PSLLW
, psllw
);
3924 LMI_HELPER(PSLLH
, psllh
);
3925 LMI_HELPER(PSRLW
, psrlw
);
3926 LMI_HELPER(PSRLH
, psrlh
);
3927 LMI_HELPER(PSRAW
, psraw
);
3928 LMI_HELPER(PSRAH
, psrah
);
3930 LMI_HELPER(PMULLH
, pmullh
);
3931 LMI_HELPER(PMULHH
, pmulhh
);
3932 LMI_HELPER(PMULHUH
, pmulhuh
);
3933 LMI_HELPER(PMADDHW
, pmaddhw
);
3935 LMI_HELPER(PASUBUB
, pasubub
);
3936 LMI_HELPER_1(BIADD
, biadd
);
3937 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3939 LMI_DIRECT(PADDD
, paddd
, add
);
3940 LMI_DIRECT(PSUBD
, psubd
, sub
);
3941 LMI_DIRECT(XOR_CP2
, xor, xor);
3942 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3943 LMI_DIRECT(AND_CP2
, and, and);
3944 LMI_DIRECT(PANDN
, pandn
, andc
);
3945 LMI_DIRECT(OR
, or, or);
3948 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3951 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3954 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3957 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3961 tcg_gen_andi_i64(t1
, t1
, 3);
3962 tcg_gen_shli_i64(t1
, t1
, 4);
3963 tcg_gen_shr_i64(t0
, t0
, t1
);
3964 tcg_gen_ext16u_i64(t0
, t0
);
3968 tcg_gen_add_i64(t0
, t0
, t1
);
3969 tcg_gen_ext32s_i64(t0
, t0
);
3972 tcg_gen_sub_i64(t0
, t0
, t1
);
3973 tcg_gen_ext32s_i64(t0
, t0
);
3995 /* Make sure shift count isn't TCG undefined behaviour. */
3996 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4001 tcg_gen_shl_i64(t0
, t0
, t1
);
4005 /* Since SRA is UndefinedResult without sign-extended inputs,
4006 we can treat SRA and DSRA the same. */
4007 tcg_gen_sar_i64(t0
, t0
, t1
);
4010 /* We want to shift in zeros for SRL; zero-extend first. */
4011 tcg_gen_ext32u_i64(t0
, t0
);
4014 tcg_gen_shr_i64(t0
, t0
, t1
);
4018 if (shift_max
== 32) {
4019 tcg_gen_ext32s_i64(t0
, t0
);
4022 /* Shifts larger than MAX produce zero. */
4023 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4024 tcg_gen_neg_i64(t1
, t1
);
4025 tcg_gen_and_i64(t0
, t0
, t1
);
4031 TCGv_i64 t2
= tcg_temp_new_i64();
4032 TCGLabel
*lab
= gen_new_label();
4034 tcg_gen_mov_i64(t2
, t0
);
4035 tcg_gen_add_i64(t0
, t1
, t2
);
4036 if (opc
== OPC_ADD_CP2
) {
4037 tcg_gen_ext32s_i64(t0
, t0
);
4039 tcg_gen_xor_i64(t1
, t1
, t2
);
4040 tcg_gen_xor_i64(t2
, t2
, t0
);
4041 tcg_gen_andc_i64(t1
, t2
, t1
);
4042 tcg_temp_free_i64(t2
);
4043 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4044 generate_exception(ctx
, EXCP_OVERFLOW
);
4052 TCGv_i64 t2
= tcg_temp_new_i64();
4053 TCGLabel
*lab
= gen_new_label();
4055 tcg_gen_mov_i64(t2
, t0
);
4056 tcg_gen_sub_i64(t0
, t1
, t2
);
4057 if (opc
== OPC_SUB_CP2
) {
4058 tcg_gen_ext32s_i64(t0
, t0
);
4060 tcg_gen_xor_i64(t1
, t1
, t2
);
4061 tcg_gen_xor_i64(t2
, t2
, t0
);
4062 tcg_gen_and_i64(t1
, t1
, t2
);
4063 tcg_temp_free_i64(t2
);
4064 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4065 generate_exception(ctx
, EXCP_OVERFLOW
);
4071 tcg_gen_ext32u_i64(t0
, t0
);
4072 tcg_gen_ext32u_i64(t1
, t1
);
4073 tcg_gen_mul_i64(t0
, t0
, t1
);
4082 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4083 FD field is the CC field? */
4085 MIPS_INVAL("loongson_cp2");
4086 generate_exception_end(ctx
, EXCP_RI
);
4093 gen_store_fpr64(ctx
, t0
, rd
);
4095 tcg_temp_free_i64(t0
);
4096 tcg_temp_free_i64(t1
);
4100 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4101 int rs
, int rt
, int16_t imm
)
4104 TCGv t0
= tcg_temp_new();
4105 TCGv t1
= tcg_temp_new();
4108 /* Load needed operands */
4116 /* Compare two registers */
4118 gen_load_gpr(t0
, rs
);
4119 gen_load_gpr(t1
, rt
);
4129 /* Compare register to immediate */
4130 if (rs
!= 0 || imm
!= 0) {
4131 gen_load_gpr(t0
, rs
);
4132 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4139 case OPC_TEQ
: /* rs == rs */
4140 case OPC_TEQI
: /* r0 == 0 */
4141 case OPC_TGE
: /* rs >= rs */
4142 case OPC_TGEI
: /* r0 >= 0 */
4143 case OPC_TGEU
: /* rs >= rs unsigned */
4144 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4146 generate_exception_end(ctx
, EXCP_TRAP
);
4148 case OPC_TLT
: /* rs < rs */
4149 case OPC_TLTI
: /* r0 < 0 */
4150 case OPC_TLTU
: /* rs < rs unsigned */
4151 case OPC_TLTIU
: /* r0 < 0 unsigned */
4152 case OPC_TNE
: /* rs != rs */
4153 case OPC_TNEI
: /* r0 != 0 */
4154 /* Never trap: treat as NOP. */
4158 TCGLabel
*l1
= gen_new_label();
4163 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4167 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4171 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4175 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4179 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4183 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4186 generate_exception(ctx
, EXCP_TRAP
);
4193 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4195 TranslationBlock
*tb
;
4197 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4198 likely(!ctx
->singlestep_enabled
)) {
4201 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4204 if (ctx
->singlestep_enabled
) {
4205 save_cpu_state(ctx
, 0);
4206 gen_helper_raise_exception_debug(cpu_env
);
4212 /* Branches (before delay slot) */
4213 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4215 int rs
, int rt
, int32_t offset
,
4218 target_ulong btgt
= -1;
4220 int bcond_compute
= 0;
4221 TCGv t0
= tcg_temp_new();
4222 TCGv t1
= tcg_temp_new();
4224 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4225 #ifdef MIPS_DEBUG_DISAS
4226 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4227 TARGET_FMT_lx
"\n", ctx
->pc
);
4229 generate_exception_end(ctx
, EXCP_RI
);
4233 /* Load needed operands */
4239 /* Compare two registers */
4241 gen_load_gpr(t0
, rs
);
4242 gen_load_gpr(t1
, rt
);
4245 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4259 /* Compare to zero */
4261 gen_load_gpr(t0
, rs
);
4264 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4267 #if defined(TARGET_MIPS64)
4269 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4271 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4274 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4279 /* Jump to immediate */
4280 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4284 /* Jump to register */
4285 if (offset
!= 0 && offset
!= 16) {
4286 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4287 others are reserved. */
4288 MIPS_INVAL("jump hint");
4289 generate_exception_end(ctx
, EXCP_RI
);
4292 gen_load_gpr(btarget
, rs
);
4295 MIPS_INVAL("branch/jump");
4296 generate_exception_end(ctx
, EXCP_RI
);
4299 if (bcond_compute
== 0) {
4300 /* No condition to be computed */
4302 case OPC_BEQ
: /* rx == rx */
4303 case OPC_BEQL
: /* rx == rx likely */
4304 case OPC_BGEZ
: /* 0 >= 0 */
4305 case OPC_BGEZL
: /* 0 >= 0 likely */
4306 case OPC_BLEZ
: /* 0 <= 0 */
4307 case OPC_BLEZL
: /* 0 <= 0 likely */
4309 ctx
->hflags
|= MIPS_HFLAG_B
;
4311 case OPC_BGEZAL
: /* 0 >= 0 */
4312 case OPC_BGEZALL
: /* 0 >= 0 likely */
4313 /* Always take and link */
4315 ctx
->hflags
|= MIPS_HFLAG_B
;
4317 case OPC_BNE
: /* rx != rx */
4318 case OPC_BGTZ
: /* 0 > 0 */
4319 case OPC_BLTZ
: /* 0 < 0 */
4322 case OPC_BLTZAL
: /* 0 < 0 */
4323 /* Handle as an unconditional branch to get correct delay
4326 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4327 ctx
->hflags
|= MIPS_HFLAG_B
;
4329 case OPC_BLTZALL
: /* 0 < 0 likely */
4330 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4331 /* Skip the instruction in the delay slot */
4334 case OPC_BNEL
: /* rx != rx likely */
4335 case OPC_BGTZL
: /* 0 > 0 likely */
4336 case OPC_BLTZL
: /* 0 < 0 likely */
4337 /* Skip the instruction in the delay slot */
4341 ctx
->hflags
|= MIPS_HFLAG_B
;
4344 ctx
->hflags
|= MIPS_HFLAG_BX
;
4348 ctx
->hflags
|= MIPS_HFLAG_B
;
4351 ctx
->hflags
|= MIPS_HFLAG_BR
;
4355 ctx
->hflags
|= MIPS_HFLAG_BR
;
4358 MIPS_INVAL("branch/jump");
4359 generate_exception_end(ctx
, EXCP_RI
);
4365 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4368 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4371 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4374 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4377 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4380 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4383 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4387 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4391 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4394 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4397 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4400 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4403 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4406 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4409 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4411 #if defined(TARGET_MIPS64)
4413 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4417 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4420 ctx
->hflags
|= MIPS_HFLAG_BC
;
4423 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4426 ctx
->hflags
|= MIPS_HFLAG_BL
;
4429 MIPS_INVAL("conditional branch/jump");
4430 generate_exception_end(ctx
, EXCP_RI
);
4435 ctx
->btarget
= btgt
;
4437 switch (delayslot_size
) {
4439 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4442 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4447 int post_delay
= insn_bytes
+ delayslot_size
;
4448 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4450 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4454 if (insn_bytes
== 2)
4455 ctx
->hflags
|= MIPS_HFLAG_B16
;
4460 /* special3 bitfield operations */
4461 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4462 int rs
, int lsb
, int msb
)
4464 TCGv t0
= tcg_temp_new();
4465 TCGv t1
= tcg_temp_new();
4467 gen_load_gpr(t1
, rs
);
4470 if (lsb
+ msb
> 31) {
4473 tcg_gen_shri_tl(t0
, t1
, lsb
);
4475 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4477 tcg_gen_ext32s_tl(t0
, t0
);
4480 #if defined(TARGET_MIPS64)
4489 if (lsb
+ msb
> 63) {
4492 tcg_gen_shri_tl(t0
, t1
, lsb
);
4494 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4502 gen_load_gpr(t0
, rt
);
4503 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4504 tcg_gen_ext32s_tl(t0
, t0
);
4506 #if defined(TARGET_MIPS64)
4517 gen_load_gpr(t0
, rt
);
4518 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4523 MIPS_INVAL("bitops");
4524 generate_exception_end(ctx
, EXCP_RI
);
4529 gen_store_gpr(t0
, rt
);
4534 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4539 /* If no destination, treat it as a NOP. */
4543 t0
= tcg_temp_new();
4544 gen_load_gpr(t0
, rt
);
4548 TCGv t1
= tcg_temp_new();
4550 tcg_gen_shri_tl(t1
, t0
, 8);
4551 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4552 tcg_gen_shli_tl(t0
, t0
, 8);
4553 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4554 tcg_gen_or_tl(t0
, t0
, t1
);
4556 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4560 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4563 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4565 #if defined(TARGET_MIPS64)
4568 TCGv t1
= tcg_temp_new();
4570 tcg_gen_shri_tl(t1
, t0
, 8);
4571 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4572 tcg_gen_shli_tl(t0
, t0
, 8);
4573 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4574 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4580 TCGv t1
= tcg_temp_new();
4582 tcg_gen_shri_tl(t1
, t0
, 16);
4583 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4584 tcg_gen_shli_tl(t0
, t0
, 16);
4585 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4586 tcg_gen_or_tl(t0
, t0
, t1
);
4587 tcg_gen_shri_tl(t1
, t0
, 32);
4588 tcg_gen_shli_tl(t0
, t0
, 32);
4589 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4595 MIPS_INVAL("bsfhl");
4596 generate_exception_end(ctx
, EXCP_RI
);
4603 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4612 t0
= tcg_temp_new();
4613 t1
= tcg_temp_new();
4614 gen_load_gpr(t0
, rs
);
4615 gen_load_gpr(t1
, rt
);
4616 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4617 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4618 if (opc
== OPC_LSA
) {
4619 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4628 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4636 t0
= tcg_temp_new();
4637 gen_load_gpr(t0
, rt
);
4641 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4643 #if defined(TARGET_MIPS64)
4645 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4650 TCGv t1
= tcg_temp_new();
4651 gen_load_gpr(t1
, rs
);
4655 TCGv_i64 t2
= tcg_temp_new_i64();
4656 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4657 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4658 gen_move_low32(cpu_gpr
[rd
], t2
);
4659 tcg_temp_free_i64(t2
);
4662 #if defined(TARGET_MIPS64)
4664 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4665 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4666 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4676 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4683 t0
= tcg_temp_new();
4684 gen_load_gpr(t0
, rt
);
4687 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4689 #if defined(TARGET_MIPS64)
4691 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4698 #ifndef CONFIG_USER_ONLY
4699 /* CP0 (MMU and control) */
4700 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4702 TCGv_i64 t0
= tcg_temp_new_i64();
4703 TCGv_i64 t1
= tcg_temp_new_i64();
4705 tcg_gen_ext_tl_i64(t0
, arg
);
4706 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4707 #if defined(TARGET_MIPS64)
4708 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4710 tcg_gen_concat32_i64(t1
, t1
, t0
);
4712 tcg_gen_st_i64(t1
, cpu_env
, off
);
4713 tcg_temp_free_i64(t1
);
4714 tcg_temp_free_i64(t0
);
4717 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4719 TCGv_i64 t0
= tcg_temp_new_i64();
4720 TCGv_i64 t1
= tcg_temp_new_i64();
4722 tcg_gen_ext_tl_i64(t0
, arg
);
4723 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4724 tcg_gen_concat32_i64(t1
, t1
, t0
);
4725 tcg_gen_st_i64(t1
, cpu_env
, off
);
4726 tcg_temp_free_i64(t1
);
4727 tcg_temp_free_i64(t0
);
4730 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4732 TCGv_i64 t0
= tcg_temp_new_i64();
4734 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4735 #if defined(TARGET_MIPS64)
4736 tcg_gen_shri_i64(t0
, t0
, 30);
4738 tcg_gen_shri_i64(t0
, t0
, 32);
4740 gen_move_low32(arg
, t0
);
4741 tcg_temp_free_i64(t0
);
4744 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4746 TCGv_i64 t0
= tcg_temp_new_i64();
4748 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4749 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4750 gen_move_low32(arg
, t0
);
4751 tcg_temp_free_i64(t0
);
4754 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4756 TCGv_i32 t0
= tcg_temp_new_i32();
4758 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4759 tcg_gen_ext_i32_tl(arg
, t0
);
4760 tcg_temp_free_i32(t0
);
4763 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4765 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4766 tcg_gen_ext32s_tl(arg
, arg
);
4769 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4771 TCGv_i32 t0
= tcg_temp_new_i32();
4773 tcg_gen_trunc_tl_i32(t0
, arg
);
4774 tcg_gen_st_i32(t0
, cpu_env
, off
);
4775 tcg_temp_free_i32(t0
);
4778 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4780 const char *rn
= "invalid";
4782 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4783 goto mfhc0_read_zero
;
4790 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4794 goto mfhc0_read_zero
;
4800 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4804 goto mfhc0_read_zero
;
4810 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4811 ctx
->CP0_LLAddr_shift
);
4815 goto mfhc0_read_zero
;
4824 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4828 goto mfhc0_read_zero
;
4832 goto mfhc0_read_zero
;
4835 (void)rn
; /* avoid a compiler warning */
4836 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4840 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4841 tcg_gen_movi_tl(arg
, 0);
4844 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4846 const char *rn
= "invalid";
4847 uint64_t mask
= ctx
->PAMask
>> 36;
4849 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4857 tcg_gen_andi_tl(arg
, arg
, mask
);
4858 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4868 tcg_gen_andi_tl(arg
, arg
, mask
);
4869 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4879 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4880 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4881 relevant for modern MIPS cores supporting MTHC0, therefore
4882 treating MTHC0 to LLAddr as NOP. */
4895 tcg_gen_andi_tl(arg
, arg
, mask
);
4896 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4907 (void)rn
; /* avoid a compiler warning */
4909 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4912 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4914 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4915 tcg_gen_movi_tl(arg
, 0);
4917 tcg_gen_movi_tl(arg
, ~0);
/* Bail out to the enclosing function's cp0_unimplemented label when the
   CP0 feature condition C does not hold.  Wrapped in do/while (0) so it
   behaves as a single statement. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4928 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4930 const char *rn
= "invalid";
4933 check_insn(ctx
, ISA_MIPS32
);
4939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4944 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4948 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4949 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4954 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4963 goto cp0_unimplemented
;
4969 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4970 gen_helper_mfc0_random(arg
, cpu_env
);
4974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4989 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4990 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4994 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4995 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4999 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5000 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5001 rn
= "VPEScheFBack";
5004 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5005 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5009 goto cp0_unimplemented
;
5016 TCGv_i64 tmp
= tcg_temp_new_i64();
5017 tcg_gen_ld_i64(tmp
, cpu_env
,
5018 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5019 #if defined(TARGET_MIPS64)
5021 /* Move RI/XI fields to bits 31:30 */
5022 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5023 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5026 gen_move_low32(arg
, tmp
);
5027 tcg_temp_free_i64(tmp
);
5032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5033 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5038 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5043 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5048 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5053 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5057 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5058 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5063 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5067 goto cp0_unimplemented
;
5074 TCGv_i64 tmp
= tcg_temp_new_i64();
5075 tcg_gen_ld_i64(tmp
, cpu_env
,
5076 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5077 #if defined(TARGET_MIPS64)
5079 /* Move RI/XI fields to bits 31:30 */
5080 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5081 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5084 gen_move_low32(arg
, tmp
);
5085 tcg_temp_free_i64(tmp
);
5091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5092 rn
= "GlobalNumber";
5095 goto cp0_unimplemented
;
5101 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5102 tcg_gen_ext32s_tl(arg
, arg
);
5106 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5107 rn
= "ContextConfig";
5108 goto cp0_unimplemented
;
5111 CP0_CHECK(ctx
->ulri
);
5112 tcg_gen_ld32s_tl(arg
, cpu_env
,
5113 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5117 goto cp0_unimplemented
;
5123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5127 check_insn(ctx
, ISA_MIPS32R2
);
5128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5132 goto cp0_unimplemented
;
5138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5142 check_insn(ctx
, ISA_MIPS32R2
);
5143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5152 check_insn(ctx
, ISA_MIPS32R2
);
5153 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5157 check_insn(ctx
, ISA_MIPS32R2
);
5158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5162 check_insn(ctx
, ISA_MIPS32R2
);
5163 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5167 goto cp0_unimplemented
;
5173 check_insn(ctx
, ISA_MIPS32R2
);
5174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5178 goto cp0_unimplemented
;
5184 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5185 tcg_gen_ext32s_tl(arg
, arg
);
5190 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5195 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5199 goto cp0_unimplemented
;
5205 /* Mark as an IO operation because we read the time. */
5206 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5209 gen_helper_mfc0_count(arg
, cpu_env
);
5210 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5213 /* Break the TB to be able to take timer interrupts immediately
5214 after reading count. */
5215 ctx
->bstate
= BS_STOP
;
5218 /* 6,7 are implementation dependent */
5220 goto cp0_unimplemented
;
5226 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5227 tcg_gen_ext32s_tl(arg
, arg
);
5231 goto cp0_unimplemented
;
5237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5240 /* 6,7 are implementation dependent */
5242 goto cp0_unimplemented
;
5248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5252 check_insn(ctx
, ISA_MIPS32R2
);
5253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5257 check_insn(ctx
, ISA_MIPS32R2
);
5258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5262 check_insn(ctx
, ISA_MIPS32R2
);
5263 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5267 goto cp0_unimplemented
;
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5277 goto cp0_unimplemented
;
5283 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5284 tcg_gen_ext32s_tl(arg
, arg
);
5288 goto cp0_unimplemented
;
5294 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5298 check_insn(ctx
, ISA_MIPS32R2
);
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5303 check_insn(ctx
, ISA_MIPS32R2
);
5304 CP0_CHECK(ctx
->cmgcr
);
5305 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5306 tcg_gen_ext32s_tl(arg
, arg
);
5310 goto cp0_unimplemented
;
5316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5328 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5332 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5339 /* 6,7 are implementation dependent */
5341 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5349 goto cp0_unimplemented
;
5355 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5359 goto cp0_unimplemented
;
5365 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5369 goto cp0_unimplemented
;
5375 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5379 goto cp0_unimplemented
;
5385 #if defined(TARGET_MIPS64)
5386 check_insn(ctx
, ISA_MIPS3
);
5387 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5388 tcg_gen_ext32s_tl(arg
, arg
);
5393 goto cp0_unimplemented
;
5397 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5398 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5405 goto cp0_unimplemented
;
5409 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5410 rn
= "'Diagnostic"; /* implementation dependent */
5415 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5419 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5420 rn
= "TraceControl";
5423 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5424 rn
= "TraceControl2";
5427 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5428 rn
= "UserTraceData";
5431 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5435 goto cp0_unimplemented
;
5442 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5443 tcg_gen_ext32s_tl(arg
, arg
);
5447 goto cp0_unimplemented
;
5453 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5454 rn
= "Performance0";
5457 // gen_helper_mfc0_performance1(arg);
5458 rn
= "Performance1";
5461 // gen_helper_mfc0_performance2(arg);
5462 rn
= "Performance2";
5465 // gen_helper_mfc0_performance3(arg);
5466 rn
= "Performance3";
5469 // gen_helper_mfc0_performance4(arg);
5470 rn
= "Performance4";
5473 // gen_helper_mfc0_performance5(arg);
5474 rn
= "Performance5";
5477 // gen_helper_mfc0_performance6(arg);
5478 rn
= "Performance6";
5481 // gen_helper_mfc0_performance7(arg);
5482 rn
= "Performance7";
5485 goto cp0_unimplemented
;
5491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5495 goto cp0_unimplemented
;
5501 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5505 goto cp0_unimplemented
;
5515 TCGv_i64 tmp
= tcg_temp_new_i64();
5516 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5517 gen_move_low32(arg
, tmp
);
5518 tcg_temp_free_i64(tmp
);
5526 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5530 goto cp0_unimplemented
;
5539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5546 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5550 goto cp0_unimplemented
;
5556 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5557 tcg_gen_ext32s_tl(arg
, arg
);
5561 goto cp0_unimplemented
;
5568 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5572 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5573 tcg_gen_ld_tl(arg
, cpu_env
,
5574 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5575 tcg_gen_ext32s_tl(arg
, arg
);
5579 goto cp0_unimplemented
;
5583 goto cp0_unimplemented
;
5585 (void)rn
; /* avoid a compiler warning */
5586 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5590 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5591 gen_mfc0_unimplemented(ctx
, arg
);
5594 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5596 const char *rn
= "invalid";
5599 check_insn(ctx
, ISA_MIPS32
);
5601 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5609 gen_helper_mtc0_index(cpu_env
, arg
);
5613 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5614 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5618 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5623 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5633 goto cp0_unimplemented
;
5643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5644 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5648 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5649 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5653 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5654 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5658 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5659 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5664 tcg_gen_st_tl(arg
, cpu_env
,
5665 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5669 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5670 tcg_gen_st_tl(arg
, cpu_env
,
5671 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5672 rn
= "VPEScheFBack";
5675 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5676 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5680 goto cp0_unimplemented
;
5686 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5690 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5691 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5695 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5696 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5700 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5701 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5705 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5706 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5710 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5711 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5715 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5716 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5720 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5721 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5725 goto cp0_unimplemented
;
5731 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5737 rn
= "GlobalNumber";
5740 goto cp0_unimplemented
;
5746 gen_helper_mtc0_context(cpu_env
, arg
);
5750 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5751 rn
= "ContextConfig";
5752 goto cp0_unimplemented
;
5755 CP0_CHECK(ctx
->ulri
);
5756 tcg_gen_st_tl(arg
, cpu_env
,
5757 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5761 goto cp0_unimplemented
;
5767 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5771 check_insn(ctx
, ISA_MIPS32R2
);
5772 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5774 ctx
->bstate
= BS_STOP
;
5777 goto cp0_unimplemented
;
5783 gen_helper_mtc0_wired(cpu_env
, arg
);
5787 check_insn(ctx
, ISA_MIPS32R2
);
5788 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5792 check_insn(ctx
, ISA_MIPS32R2
);
5793 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5797 check_insn(ctx
, ISA_MIPS32R2
);
5798 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5802 check_insn(ctx
, ISA_MIPS32R2
);
5803 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5807 check_insn(ctx
, ISA_MIPS32R2
);
5808 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5812 goto cp0_unimplemented
;
5818 check_insn(ctx
, ISA_MIPS32R2
);
5819 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5820 ctx
->bstate
= BS_STOP
;
5824 goto cp0_unimplemented
;
5842 goto cp0_unimplemented
;
5848 gen_helper_mtc0_count(cpu_env
, arg
);
5851 /* 6,7 are implementation dependent */
5853 goto cp0_unimplemented
;
5859 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5863 goto cp0_unimplemented
;
5869 gen_helper_mtc0_compare(cpu_env
, arg
);
5872 /* 6,7 are implementation dependent */
5874 goto cp0_unimplemented
;
5880 save_cpu_state(ctx
, 1);
5881 gen_helper_mtc0_status(cpu_env
, arg
);
5882 /* BS_STOP isn't good enough here, hflags may have changed. */
5883 gen_save_pc(ctx
->pc
+ 4);
5884 ctx
->bstate
= BS_EXCP
;
5888 check_insn(ctx
, ISA_MIPS32R2
);
5889 gen_helper_mtc0_intctl(cpu_env
, arg
);
5890 /* Stop translation as we may have switched the execution mode */
5891 ctx
->bstate
= BS_STOP
;
5895 check_insn(ctx
, ISA_MIPS32R2
);
5896 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5897 /* Stop translation as we may have switched the execution mode */
5898 ctx
->bstate
= BS_STOP
;
5902 check_insn(ctx
, ISA_MIPS32R2
);
5903 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5904 /* Stop translation as we may have switched the execution mode */
5905 ctx
->bstate
= BS_STOP
;
5909 goto cp0_unimplemented
;
5915 save_cpu_state(ctx
, 1);
5916 gen_helper_mtc0_cause(cpu_env
, arg
);
5920 goto cp0_unimplemented
;
5926 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5930 goto cp0_unimplemented
;
5940 check_insn(ctx
, ISA_MIPS32R2
);
5941 gen_helper_mtc0_ebase(cpu_env
, arg
);
5945 goto cp0_unimplemented
;
5951 gen_helper_mtc0_config0(cpu_env
, arg
);
5953 /* Stop translation as we may have switched the execution mode */
5954 ctx
->bstate
= BS_STOP
;
5957 /* ignored, read only */
5961 gen_helper_mtc0_config2(cpu_env
, arg
);
5963 /* Stop translation as we may have switched the execution mode */
5964 ctx
->bstate
= BS_STOP
;
5967 gen_helper_mtc0_config3(cpu_env
, arg
);
5969 /* Stop translation as we may have switched the execution mode */
5970 ctx
->bstate
= BS_STOP
;
5973 gen_helper_mtc0_config4(cpu_env
, arg
);
5975 ctx
->bstate
= BS_STOP
;
5978 gen_helper_mtc0_config5(cpu_env
, arg
);
5980 /* Stop translation as we may have switched the execution mode */
5981 ctx
->bstate
= BS_STOP
;
5983 /* 6,7 are implementation dependent */
5993 rn
= "Invalid config selector";
5994 goto cp0_unimplemented
;
6000 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6004 goto cp0_unimplemented
;
6010 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6014 goto cp0_unimplemented
;
6020 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6024 goto cp0_unimplemented
;
6030 #if defined(TARGET_MIPS64)
6031 check_insn(ctx
, ISA_MIPS3
);
6032 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6037 goto cp0_unimplemented
;
6041 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6042 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6045 gen_helper_mtc0_framemask(cpu_env
, arg
);
6049 goto cp0_unimplemented
;
6054 rn
= "Diagnostic"; /* implementation dependent */
6059 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6060 /* BS_STOP isn't good enough here, hflags may have changed. */
6061 gen_save_pc(ctx
->pc
+ 4);
6062 ctx
->bstate
= BS_EXCP
;
6066 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6067 rn
= "TraceControl";
6068 /* Stop translation as we may have switched the execution mode */
6069 ctx
->bstate
= BS_STOP
;
6072 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6073 rn
= "TraceControl2";
6074 /* Stop translation as we may have switched the execution mode */
6075 ctx
->bstate
= BS_STOP
;
6078 /* Stop translation as we may have switched the execution mode */
6079 ctx
->bstate
= BS_STOP
;
6080 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6081 rn
= "UserTraceData";
6082 /* Stop translation as we may have switched the execution mode */
6083 ctx
->bstate
= BS_STOP
;
6086 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6087 /* Stop translation as we may have switched the execution mode */
6088 ctx
->bstate
= BS_STOP
;
6092 goto cp0_unimplemented
;
6099 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6103 goto cp0_unimplemented
;
6109 gen_helper_mtc0_performance0(cpu_env
, arg
);
6110 rn
= "Performance0";
6113 // gen_helper_mtc0_performance1(arg);
6114 rn
= "Performance1";
6117 // gen_helper_mtc0_performance2(arg);
6118 rn
= "Performance2";
6121 // gen_helper_mtc0_performance3(arg);
6122 rn
= "Performance3";
6125 // gen_helper_mtc0_performance4(arg);
6126 rn
= "Performance4";
6129 // gen_helper_mtc0_performance5(arg);
6130 rn
= "Performance5";
6133 // gen_helper_mtc0_performance6(arg);
6134 rn
= "Performance6";
6137 // gen_helper_mtc0_performance7(arg);
6138 rn
= "Performance7";
6141 goto cp0_unimplemented
;
6147 gen_helper_mtc0_errctl(cpu_env
, arg
);
6148 ctx
->bstate
= BS_STOP
;
6152 goto cp0_unimplemented
;
6162 goto cp0_unimplemented
;
6171 gen_helper_mtc0_taglo(cpu_env
, arg
);
6178 gen_helper_mtc0_datalo(cpu_env
, arg
);
6182 goto cp0_unimplemented
;
6191 gen_helper_mtc0_taghi(cpu_env
, arg
);
6198 gen_helper_mtc0_datahi(cpu_env
, arg
);
6203 goto cp0_unimplemented
;
6209 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6213 goto cp0_unimplemented
;
6220 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6224 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6225 tcg_gen_st_tl(arg
, cpu_env
,
6226 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6230 goto cp0_unimplemented
;
6232 /* Stop translation as we may have switched the execution mode */
6233 ctx
->bstate
= BS_STOP
;
6236 goto cp0_unimplemented
;
6238 (void)rn
; /* avoid a compiler warning */
6239 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6240 /* For simplicity assume that all writes can cause interrupts. */
6241 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6243 ctx
->bstate
= BS_STOP
;
6248 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6251 #if defined(TARGET_MIPS64)
6252 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6254 const char *rn
= "invalid";
6257 check_insn(ctx
, ISA_MIPS64
);
6263 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6268 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6272 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6273 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6277 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6278 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6287 goto cp0_unimplemented
;
6293 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6294 gen_helper_mfc0_random(arg
, cpu_env
);
6298 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6303 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6304 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6308 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6309 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6313 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6314 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6318 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6319 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6323 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6324 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6325 rn
= "VPEScheFBack";
6328 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6329 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6333 goto cp0_unimplemented
;
6339 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6343 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6344 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6348 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6349 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6353 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6354 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6358 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6359 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6363 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6364 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6368 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6369 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6373 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6374 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6378 goto cp0_unimplemented
;
6384 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6390 rn
= "GlobalNumber";
6393 goto cp0_unimplemented
;
6399 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6403 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6404 rn
= "ContextConfig";
6405 goto cp0_unimplemented
;
6408 CP0_CHECK(ctx
->ulri
);
6409 tcg_gen_ld_tl(arg
, cpu_env
,
6410 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6414 goto cp0_unimplemented
;
6420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6424 check_insn(ctx
, ISA_MIPS32R2
);
6425 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6429 goto cp0_unimplemented
;
6435 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6439 check_insn(ctx
, ISA_MIPS32R2
);
6440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6444 check_insn(ctx
, ISA_MIPS32R2
);
6445 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6449 check_insn(ctx
, ISA_MIPS32R2
);
6450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6454 check_insn(ctx
, ISA_MIPS32R2
);
6455 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6459 check_insn(ctx
, ISA_MIPS32R2
);
6460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6464 goto cp0_unimplemented
;
6470 check_insn(ctx
, ISA_MIPS32R2
);
6471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6475 goto cp0_unimplemented
;
6481 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6495 goto cp0_unimplemented
;
6501 /* Mark as an IO operation because we read the time. */
6502 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6505 gen_helper_mfc0_count(arg
, cpu_env
);
6506 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6509 /* Break the TB to be able to take timer interrupts immediately
6510 after reading count. */
6511 ctx
->bstate
= BS_STOP
;
6514 /* 6,7 are implementation dependent */
6516 goto cp0_unimplemented
;
6522 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6526 goto cp0_unimplemented
;
6532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6535 /* 6,7 are implementation dependent */
6537 goto cp0_unimplemented
;
6543 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6547 check_insn(ctx
, ISA_MIPS32R2
);
6548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6552 check_insn(ctx
, ISA_MIPS32R2
);
6553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6557 check_insn(ctx
, ISA_MIPS32R2
);
6558 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6562 goto cp0_unimplemented
;
6568 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6572 goto cp0_unimplemented
;
6578 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6582 goto cp0_unimplemented
;
6588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6592 check_insn(ctx
, ISA_MIPS32R2
);
6593 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6597 check_insn(ctx
, ISA_MIPS32R2
);
6598 CP0_CHECK(ctx
->cmgcr
);
6599 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6603 goto cp0_unimplemented
;
6609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6617 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6621 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6632 /* 6,7 are implementation dependent */
6634 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6638 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6642 goto cp0_unimplemented
;
6648 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6652 goto cp0_unimplemented
;
6658 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6662 goto cp0_unimplemented
;
6668 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6672 goto cp0_unimplemented
;
6678 check_insn(ctx
, ISA_MIPS3
);
6679 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6683 goto cp0_unimplemented
;
6687 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6688 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6691 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6695 goto cp0_unimplemented
;
6699 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6700 rn
= "'Diagnostic"; /* implementation dependent */
6705 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6709 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6710 rn
= "TraceControl";
6713 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6714 rn
= "TraceControl2";
6717 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6718 rn
= "UserTraceData";
6721 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6725 goto cp0_unimplemented
;
6732 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6736 goto cp0_unimplemented
;
6742 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6743 rn
= "Performance0";
6746 // gen_helper_dmfc0_performance1(arg);
6747 rn
= "Performance1";
6750 // gen_helper_dmfc0_performance2(arg);
6751 rn
= "Performance2";
6754 // gen_helper_dmfc0_performance3(arg);
6755 rn
= "Performance3";
6758 // gen_helper_dmfc0_performance4(arg);
6759 rn
= "Performance4";
6762 // gen_helper_dmfc0_performance5(arg);
6763 rn
= "Performance5";
6766 // gen_helper_dmfc0_performance6(arg);
6767 rn
= "Performance6";
6770 // gen_helper_dmfc0_performance7(arg);
6771 rn
= "Performance7";
6774 goto cp0_unimplemented
;
6780 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6784 goto cp0_unimplemented
;
6791 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6795 goto cp0_unimplemented
;
6804 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6811 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6815 goto cp0_unimplemented
;
6824 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6831 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6835 goto cp0_unimplemented
;
6841 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6845 goto cp0_unimplemented
;
6852 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6856 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6857 tcg_gen_ld_tl(arg
, cpu_env
,
6858 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6862 goto cp0_unimplemented
;
6866 goto cp0_unimplemented
;
6868 (void)rn
; /* avoid a compiler warning */
6869 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6873 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6874 gen_mfc0_unimplemented(ctx
, arg
);
6877 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6879 const char *rn
= "invalid";
6882 check_insn(ctx
, ISA_MIPS64
);
6884 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6892 gen_helper_mtc0_index(cpu_env
, arg
);
6896 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6897 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6901 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6906 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6916 goto cp0_unimplemented
;
6926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6927 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6931 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6932 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6936 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6937 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6941 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6942 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6947 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6952 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6953 rn
= "VPEScheFBack";
6956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6957 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6961 goto cp0_unimplemented
;
6967 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6971 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6972 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6976 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6977 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6982 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6986 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6987 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6992 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6997 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7002 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7006 goto cp0_unimplemented
;
7012 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7018 rn
= "GlobalNumber";
7021 goto cp0_unimplemented
;
7027 gen_helper_mtc0_context(cpu_env
, arg
);
7031 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7032 rn
= "ContextConfig";
7033 goto cp0_unimplemented
;
7036 CP0_CHECK(ctx
->ulri
);
7037 tcg_gen_st_tl(arg
, cpu_env
,
7038 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7042 goto cp0_unimplemented
;
7048 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7052 check_insn(ctx
, ISA_MIPS32R2
);
7053 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7057 goto cp0_unimplemented
;
7063 gen_helper_mtc0_wired(cpu_env
, arg
);
7067 check_insn(ctx
, ISA_MIPS32R2
);
7068 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7072 check_insn(ctx
, ISA_MIPS32R2
);
7073 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7077 check_insn(ctx
, ISA_MIPS32R2
);
7078 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7082 check_insn(ctx
, ISA_MIPS32R2
);
7083 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7087 check_insn(ctx
, ISA_MIPS32R2
);
7088 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7092 goto cp0_unimplemented
;
7098 check_insn(ctx
, ISA_MIPS32R2
);
7099 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7100 ctx
->bstate
= BS_STOP
;
7104 goto cp0_unimplemented
;
7122 goto cp0_unimplemented
;
7128 gen_helper_mtc0_count(cpu_env
, arg
);
7131 /* 6,7 are implementation dependent */
7133 goto cp0_unimplemented
;
7135 /* Stop translation as we may have switched the execution mode */
7136 ctx
->bstate
= BS_STOP
;
7141 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7145 goto cp0_unimplemented
;
7151 gen_helper_mtc0_compare(cpu_env
, arg
);
7154 /* 6,7 are implementation dependent */
7156 goto cp0_unimplemented
;
7158 /* Stop translation as we may have switched the execution mode */
7159 ctx
->bstate
= BS_STOP
;
7164 save_cpu_state(ctx
, 1);
7165 gen_helper_mtc0_status(cpu_env
, arg
);
7166 /* BS_STOP isn't good enough here, hflags may have changed. */
7167 gen_save_pc(ctx
->pc
+ 4);
7168 ctx
->bstate
= BS_EXCP
;
7172 check_insn(ctx
, ISA_MIPS32R2
);
7173 gen_helper_mtc0_intctl(cpu_env
, arg
);
7174 /* Stop translation as we may have switched the execution mode */
7175 ctx
->bstate
= BS_STOP
;
7179 check_insn(ctx
, ISA_MIPS32R2
);
7180 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7181 /* Stop translation as we may have switched the execution mode */
7182 ctx
->bstate
= BS_STOP
;
7186 check_insn(ctx
, ISA_MIPS32R2
);
7187 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7188 /* Stop translation as we may have switched the execution mode */
7189 ctx
->bstate
= BS_STOP
;
7193 goto cp0_unimplemented
;
7199 save_cpu_state(ctx
, 1);
7200 /* Mark as an IO operation because we may trigger a software
7202 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7205 gen_helper_mtc0_cause(cpu_env
, arg
);
7206 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7209 /* Stop translation as we may have triggered an intetrupt */
7210 ctx
->bstate
= BS_STOP
;
7214 goto cp0_unimplemented
;
7220 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7224 goto cp0_unimplemented
;
7234 check_insn(ctx
, ISA_MIPS32R2
);
7235 gen_helper_mtc0_ebase(cpu_env
, arg
);
7239 goto cp0_unimplemented
;
7245 gen_helper_mtc0_config0(cpu_env
, arg
);
7247 /* Stop translation as we may have switched the execution mode */
7248 ctx
->bstate
= BS_STOP
;
7251 /* ignored, read only */
7255 gen_helper_mtc0_config2(cpu_env
, arg
);
7257 /* Stop translation as we may have switched the execution mode */
7258 ctx
->bstate
= BS_STOP
;
7261 gen_helper_mtc0_config3(cpu_env
, arg
);
7263 /* Stop translation as we may have switched the execution mode */
7264 ctx
->bstate
= BS_STOP
;
7267 /* currently ignored */
7271 gen_helper_mtc0_config5(cpu_env
, arg
);
7273 /* Stop translation as we may have switched the execution mode */
7274 ctx
->bstate
= BS_STOP
;
7276 /* 6,7 are implementation dependent */
7278 rn
= "Invalid config selector";
7279 goto cp0_unimplemented
;
7285 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7289 goto cp0_unimplemented
;
7295 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7299 goto cp0_unimplemented
;
7305 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7309 goto cp0_unimplemented
;
7315 check_insn(ctx
, ISA_MIPS3
);
7316 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7320 goto cp0_unimplemented
;
7324 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7325 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7328 gen_helper_mtc0_framemask(cpu_env
, arg
);
7332 goto cp0_unimplemented
;
7337 rn
= "Diagnostic"; /* implementation dependent */
7342 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7343 /* BS_STOP isn't good enough here, hflags may have changed. */
7344 gen_save_pc(ctx
->pc
+ 4);
7345 ctx
->bstate
= BS_EXCP
;
7349 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7350 /* Stop translation as we may have switched the execution mode */
7351 ctx
->bstate
= BS_STOP
;
7352 rn
= "TraceControl";
7355 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7356 /* Stop translation as we may have switched the execution mode */
7357 ctx
->bstate
= BS_STOP
;
7358 rn
= "TraceControl2";
7361 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7362 /* Stop translation as we may have switched the execution mode */
7363 ctx
->bstate
= BS_STOP
;
7364 rn
= "UserTraceData";
7367 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7368 /* Stop translation as we may have switched the execution mode */
7369 ctx
->bstate
= BS_STOP
;
7373 goto cp0_unimplemented
;
7380 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7384 goto cp0_unimplemented
;
7390 gen_helper_mtc0_performance0(cpu_env
, arg
);
7391 rn
= "Performance0";
7394 // gen_helper_mtc0_performance1(cpu_env, arg);
7395 rn
= "Performance1";
7398 // gen_helper_mtc0_performance2(cpu_env, arg);
7399 rn
= "Performance2";
7402 // gen_helper_mtc0_performance3(cpu_env, arg);
7403 rn
= "Performance3";
7406 // gen_helper_mtc0_performance4(cpu_env, arg);
7407 rn
= "Performance4";
7410 // gen_helper_mtc0_performance5(cpu_env, arg);
7411 rn
= "Performance5";
7414 // gen_helper_mtc0_performance6(cpu_env, arg);
7415 rn
= "Performance6";
7418 // gen_helper_mtc0_performance7(cpu_env, arg);
7419 rn
= "Performance7";
7422 goto cp0_unimplemented
;
7428 gen_helper_mtc0_errctl(cpu_env
, arg
);
7429 ctx
->bstate
= BS_STOP
;
7433 goto cp0_unimplemented
;
7443 goto cp0_unimplemented
;
7452 gen_helper_mtc0_taglo(cpu_env
, arg
);
7459 gen_helper_mtc0_datalo(cpu_env
, arg
);
7463 goto cp0_unimplemented
;
7472 gen_helper_mtc0_taghi(cpu_env
, arg
);
7479 gen_helper_mtc0_datahi(cpu_env
, arg
);
7484 goto cp0_unimplemented
;
7490 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7494 goto cp0_unimplemented
;
7501 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7505 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7506 tcg_gen_st_tl(arg
, cpu_env
,
7507 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7511 goto cp0_unimplemented
;
7513 /* Stop translation as we may have switched the execution mode */
7514 ctx
->bstate
= BS_STOP
;
7517 goto cp0_unimplemented
;
7519 (void)rn
; /* avoid a compiler warning */
7520 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7521 /* For simplicity assume that all writes can cause interrupts. */
7522 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7524 ctx
->bstate
= BS_STOP
;
7529 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7531 #endif /* TARGET_MIPS64 */
7533 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7534 int u
, int sel
, int h
)
7536 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7537 TCGv t0
= tcg_temp_local_new();
7539 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7540 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7541 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7542 tcg_gen_movi_tl(t0
, -1);
7543 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7544 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7545 tcg_gen_movi_tl(t0
, -1);
7551 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7554 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7564 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7567 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7570 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7573 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7576 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7579 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7582 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7585 gen_mfc0(ctx
, t0
, rt
, sel
);
7592 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7595 gen_mfc0(ctx
, t0
, rt
, sel
);
7601 gen_helper_mftc0_status(t0
, cpu_env
);
7604 gen_mfc0(ctx
, t0
, rt
, sel
);
7610 gen_helper_mftc0_cause(t0
, cpu_env
);
7620 gen_helper_mftc0_epc(t0
, cpu_env
);
7630 gen_helper_mftc0_ebase(t0
, cpu_env
);
7640 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7650 gen_helper_mftc0_debug(t0
, cpu_env
);
7653 gen_mfc0(ctx
, t0
, rt
, sel
);
7658 gen_mfc0(ctx
, t0
, rt
, sel
);
7660 } else switch (sel
) {
7661 /* GPR registers. */
7663 gen_helper_1e0i(mftgpr
, t0
, rt
);
7665 /* Auxiliary CPU registers */
7669 gen_helper_1e0i(mftlo
, t0
, 0);
7672 gen_helper_1e0i(mfthi
, t0
, 0);
7675 gen_helper_1e0i(mftacx
, t0
, 0);
7678 gen_helper_1e0i(mftlo
, t0
, 1);
7681 gen_helper_1e0i(mfthi
, t0
, 1);
7684 gen_helper_1e0i(mftacx
, t0
, 1);
7687 gen_helper_1e0i(mftlo
, t0
, 2);
7690 gen_helper_1e0i(mfthi
, t0
, 2);
7693 gen_helper_1e0i(mftacx
, t0
, 2);
7696 gen_helper_1e0i(mftlo
, t0
, 3);
7699 gen_helper_1e0i(mfthi
, t0
, 3);
7702 gen_helper_1e0i(mftacx
, t0
, 3);
7705 gen_helper_mftdsp(t0
, cpu_env
);
7711 /* Floating point (COP1). */
7713 /* XXX: For now we support only a single FPU context. */
7715 TCGv_i32 fp0
= tcg_temp_new_i32();
7717 gen_load_fpr32(ctx
, fp0
, rt
);
7718 tcg_gen_ext_i32_tl(t0
, fp0
);
7719 tcg_temp_free_i32(fp0
);
7721 TCGv_i32 fp0
= tcg_temp_new_i32();
7723 gen_load_fpr32h(ctx
, fp0
, rt
);
7724 tcg_gen_ext_i32_tl(t0
, fp0
);
7725 tcg_temp_free_i32(fp0
);
7729 /* XXX: For now we support only a single FPU context. */
7730 gen_helper_1e0i(cfc1
, t0
, rt
);
7732 /* COP2: Not implemented. */
7739 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7740 gen_store_gpr(t0
, rd
);
7746 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7747 generate_exception_end(ctx
, EXCP_RI
);
7750 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7751 int u
, int sel
, int h
)
7753 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7754 TCGv t0
= tcg_temp_local_new();
7756 gen_load_gpr(t0
, rt
);
7757 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7758 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7759 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7761 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7762 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7769 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7772 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7782 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7785 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7788 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7791 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7794 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7797 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7800 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7803 gen_mtc0(ctx
, t0
, rd
, sel
);
7810 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7813 gen_mtc0(ctx
, t0
, rd
, sel
);
7819 gen_helper_mttc0_status(cpu_env
, t0
);
7822 gen_mtc0(ctx
, t0
, rd
, sel
);
7828 gen_helper_mttc0_cause(cpu_env
, t0
);
7838 gen_helper_mttc0_ebase(cpu_env
, t0
);
7848 gen_helper_mttc0_debug(cpu_env
, t0
);
7851 gen_mtc0(ctx
, t0
, rd
, sel
);
7856 gen_mtc0(ctx
, t0
, rd
, sel
);
7858 } else switch (sel
) {
7859 /* GPR registers. */
7861 gen_helper_0e1i(mttgpr
, t0
, rd
);
7863 /* Auxiliary CPU registers */
7867 gen_helper_0e1i(mttlo
, t0
, 0);
7870 gen_helper_0e1i(mtthi
, t0
, 0);
7873 gen_helper_0e1i(mttacx
, t0
, 0);
7876 gen_helper_0e1i(mttlo
, t0
, 1);
7879 gen_helper_0e1i(mtthi
, t0
, 1);
7882 gen_helper_0e1i(mttacx
, t0
, 1);
7885 gen_helper_0e1i(mttlo
, t0
, 2);
7888 gen_helper_0e1i(mtthi
, t0
, 2);
7891 gen_helper_0e1i(mttacx
, t0
, 2);
7894 gen_helper_0e1i(mttlo
, t0
, 3);
7897 gen_helper_0e1i(mtthi
, t0
, 3);
7900 gen_helper_0e1i(mttacx
, t0
, 3);
7903 gen_helper_mttdsp(cpu_env
, t0
);
7909 /* Floating point (COP1). */
7911 /* XXX: For now we support only a single FPU context. */
7913 TCGv_i32 fp0
= tcg_temp_new_i32();
7915 tcg_gen_trunc_tl_i32(fp0
, t0
);
7916 gen_store_fpr32(ctx
, fp0
, rd
);
7917 tcg_temp_free_i32(fp0
);
7919 TCGv_i32 fp0
= tcg_temp_new_i32();
7921 tcg_gen_trunc_tl_i32(fp0
, t0
);
7922 gen_store_fpr32h(ctx
, fp0
, rd
);
7923 tcg_temp_free_i32(fp0
);
7927 /* XXX: For now we support only a single FPU context. */
7929 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7931 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7932 tcg_temp_free_i32(fs_tmp
);
7934 /* Stop translation as we may have changed hflags */
7935 ctx
->bstate
= BS_STOP
;
7937 /* COP2: Not implemented. */
7944 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7950 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7951 generate_exception_end(ctx
, EXCP_RI
);
7954 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
7956 const char *opn
= "ldst";
7958 check_cp0_enabled(ctx
);
7965 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7970 TCGv t0
= tcg_temp_new();
7972 gen_load_gpr(t0
, rt
);
7973 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7978 #if defined(TARGET_MIPS64)
7980 check_insn(ctx
, ISA_MIPS3
);
7985 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7989 check_insn(ctx
, ISA_MIPS3
);
7991 TCGv t0
= tcg_temp_new();
7993 gen_load_gpr(t0
, rt
);
7994 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8006 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8012 TCGv t0
= tcg_temp_new();
8013 gen_load_gpr(t0
, rt
);
8014 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8020 check_insn(ctx
, ASE_MT
);
8025 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8026 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8030 check_insn(ctx
, ASE_MT
);
8031 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8032 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8037 if (!env
->tlb
->helper_tlbwi
)
8039 gen_helper_tlbwi(cpu_env
);
8044 if (!env
->tlb
->helper_tlbinv
) {
8047 gen_helper_tlbinv(cpu_env
);
8048 } /* treat as nop if TLBINV not supported */
8053 if (!env
->tlb
->helper_tlbinvf
) {
8056 gen_helper_tlbinvf(cpu_env
);
8057 } /* treat as nop if TLBINV not supported */
8061 if (!env
->tlb
->helper_tlbwr
)
8063 gen_helper_tlbwr(cpu_env
);
8067 if (!env
->tlb
->helper_tlbp
)
8069 gen_helper_tlbp(cpu_env
);
8073 if (!env
->tlb
->helper_tlbr
)
8075 gen_helper_tlbr(cpu_env
);
8077 case OPC_ERET
: /* OPC_ERETNC */
8078 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8079 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8082 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8083 if (ctx
->opcode
& (1 << bit_shift
)) {
8086 check_insn(ctx
, ISA_MIPS32R5
);
8087 gen_helper_eretnc(cpu_env
);
8091 check_insn(ctx
, ISA_MIPS2
);
8092 gen_helper_eret(cpu_env
);
8094 ctx
->bstate
= BS_EXCP
;
8099 check_insn(ctx
, ISA_MIPS32
);
8100 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8101 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8104 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8106 generate_exception_end(ctx
, EXCP_RI
);
8108 gen_helper_deret(cpu_env
);
8109 ctx
->bstate
= BS_EXCP
;
8114 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8115 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8116 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8119 /* If we get an exception, we want to restart at next instruction */
8121 save_cpu_state(ctx
, 1);
8123 gen_helper_wait(cpu_env
);
8124 ctx
->bstate
= BS_EXCP
;
8129 generate_exception_end(ctx
, EXCP_RI
);
8132 (void)opn
; /* avoid a compiler warning */
8134 #endif /* !CONFIG_USER_ONLY */
8136 /* CP1 Branches (before delay slot) */
8137 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8138 int32_t cc
, int32_t offset
)
8140 target_ulong btarget
;
8141 TCGv_i32 t0
= tcg_temp_new_i32();
8143 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8144 generate_exception_end(ctx
, EXCP_RI
);
8149 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8151 btarget
= ctx
->pc
+ 4 + offset
;
8155 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8156 tcg_gen_not_i32(t0
, t0
);
8157 tcg_gen_andi_i32(t0
, t0
, 1);
8158 tcg_gen_extu_i32_tl(bcond
, t0
);
8161 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8162 tcg_gen_not_i32(t0
, t0
);
8163 tcg_gen_andi_i32(t0
, t0
, 1);
8164 tcg_gen_extu_i32_tl(bcond
, t0
);
8167 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8168 tcg_gen_andi_i32(t0
, t0
, 1);
8169 tcg_gen_extu_i32_tl(bcond
, t0
);
8172 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8173 tcg_gen_andi_i32(t0
, t0
, 1);
8174 tcg_gen_extu_i32_tl(bcond
, t0
);
8176 ctx
->hflags
|= MIPS_HFLAG_BL
;
8180 TCGv_i32 t1
= tcg_temp_new_i32();
8181 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8182 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8183 tcg_gen_nand_i32(t0
, t0
, t1
);
8184 tcg_temp_free_i32(t1
);
8185 tcg_gen_andi_i32(t0
, t0
, 1);
8186 tcg_gen_extu_i32_tl(bcond
, t0
);
8191 TCGv_i32 t1
= tcg_temp_new_i32();
8192 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8193 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8194 tcg_gen_or_i32(t0
, t0
, t1
);
8195 tcg_temp_free_i32(t1
);
8196 tcg_gen_andi_i32(t0
, t0
, 1);
8197 tcg_gen_extu_i32_tl(bcond
, t0
);
8202 TCGv_i32 t1
= tcg_temp_new_i32();
8203 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8204 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8205 tcg_gen_and_i32(t0
, t0
, t1
);
8206 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8207 tcg_gen_and_i32(t0
, t0
, t1
);
8208 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8209 tcg_gen_nand_i32(t0
, t0
, t1
);
8210 tcg_temp_free_i32(t1
);
8211 tcg_gen_andi_i32(t0
, t0
, 1);
8212 tcg_gen_extu_i32_tl(bcond
, t0
);
8217 TCGv_i32 t1
= tcg_temp_new_i32();
8218 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8219 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8220 tcg_gen_or_i32(t0
, t0
, t1
);
8221 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8222 tcg_gen_or_i32(t0
, t0
, t1
);
8223 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8224 tcg_gen_or_i32(t0
, t0
, t1
);
8225 tcg_temp_free_i32(t1
);
8226 tcg_gen_andi_i32(t0
, t0
, 1);
8227 tcg_gen_extu_i32_tl(bcond
, t0
);
8230 ctx
->hflags
|= MIPS_HFLAG_BC
;
8233 MIPS_INVAL("cp1 cond branch");
8234 generate_exception_end(ctx
, EXCP_RI
);
8237 ctx
->btarget
= btarget
;
8238 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8240 tcg_temp_free_i32(t0
);
8243 /* R6 CP1 Branches */
8244 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8245 int32_t ft
, int32_t offset
,
8248 target_ulong btarget
;
8249 TCGv_i64 t0
= tcg_temp_new_i64();
8251 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8252 #ifdef MIPS_DEBUG_DISAS
8253 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8256 generate_exception_end(ctx
, EXCP_RI
);
8260 gen_load_fpr64(ctx
, t0
, ft
);
8261 tcg_gen_andi_i64(t0
, t0
, 1);
8263 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8267 tcg_gen_xori_i64(t0
, t0
, 1);
8268 ctx
->hflags
|= MIPS_HFLAG_BC
;
8271 /* t0 already set */
8272 ctx
->hflags
|= MIPS_HFLAG_BC
;
8275 MIPS_INVAL("cp1 cond branch");
8276 generate_exception_end(ctx
, EXCP_RI
);
8280 tcg_gen_trunc_i64_tl(bcond
, t0
);
8282 ctx
->btarget
= btarget
;
8284 switch (delayslot_size
) {
8286 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8289 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8294 tcg_temp_free_i64(t0
);
8297 /* Coprocessor 1 (FPU) */
8299 #define FOP(func, fmt) (((fmt) << 21) | (func))
8302 OPC_ADD_S
= FOP(0, FMT_S
),
8303 OPC_SUB_S
= FOP(1, FMT_S
),
8304 OPC_MUL_S
= FOP(2, FMT_S
),
8305 OPC_DIV_S
= FOP(3, FMT_S
),
8306 OPC_SQRT_S
= FOP(4, FMT_S
),
8307 OPC_ABS_S
= FOP(5, FMT_S
),
8308 OPC_MOV_S
= FOP(6, FMT_S
),
8309 OPC_NEG_S
= FOP(7, FMT_S
),
8310 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8311 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8312 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8313 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8314 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8315 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8316 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8317 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8318 OPC_SEL_S
= FOP(16, FMT_S
),
8319 OPC_MOVCF_S
= FOP(17, FMT_S
),
8320 OPC_MOVZ_S
= FOP(18, FMT_S
),
8321 OPC_MOVN_S
= FOP(19, FMT_S
),
8322 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8323 OPC_RECIP_S
= FOP(21, FMT_S
),
8324 OPC_RSQRT_S
= FOP(22, FMT_S
),
8325 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8326 OPC_MADDF_S
= FOP(24, FMT_S
),
8327 OPC_MSUBF_S
= FOP(25, FMT_S
),
8328 OPC_RINT_S
= FOP(26, FMT_S
),
8329 OPC_CLASS_S
= FOP(27, FMT_S
),
8330 OPC_MIN_S
= FOP(28, FMT_S
),
8331 OPC_RECIP2_S
= FOP(28, FMT_S
),
8332 OPC_MINA_S
= FOP(29, FMT_S
),
8333 OPC_RECIP1_S
= FOP(29, FMT_S
),
8334 OPC_MAX_S
= FOP(30, FMT_S
),
8335 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8336 OPC_MAXA_S
= FOP(31, FMT_S
),
8337 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8338 OPC_CVT_D_S
= FOP(33, FMT_S
),
8339 OPC_CVT_W_S
= FOP(36, FMT_S
),
8340 OPC_CVT_L_S
= FOP(37, FMT_S
),
8341 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8342 OPC_CMP_F_S
= FOP (48, FMT_S
),
8343 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8344 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8345 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8346 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8347 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8348 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8349 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8350 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8351 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8352 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8353 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8354 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8355 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8356 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8357 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8359 OPC_ADD_D
= FOP(0, FMT_D
),
8360 OPC_SUB_D
= FOP(1, FMT_D
),
8361 OPC_MUL_D
= FOP(2, FMT_D
),
8362 OPC_DIV_D
= FOP(3, FMT_D
),
8363 OPC_SQRT_D
= FOP(4, FMT_D
),
8364 OPC_ABS_D
= FOP(5, FMT_D
),
8365 OPC_MOV_D
= FOP(6, FMT_D
),
8366 OPC_NEG_D
= FOP(7, FMT_D
),
8367 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8368 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8369 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8370 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8371 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8372 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8373 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8374 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8375 OPC_SEL_D
= FOP(16, FMT_D
),
8376 OPC_MOVCF_D
= FOP(17, FMT_D
),
8377 OPC_MOVZ_D
= FOP(18, FMT_D
),
8378 OPC_MOVN_D
= FOP(19, FMT_D
),
8379 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8380 OPC_RECIP_D
= FOP(21, FMT_D
),
8381 OPC_RSQRT_D
= FOP(22, FMT_D
),
8382 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8383 OPC_MADDF_D
= FOP(24, FMT_D
),
8384 OPC_MSUBF_D
= FOP(25, FMT_D
),
8385 OPC_RINT_D
= FOP(26, FMT_D
),
8386 OPC_CLASS_D
= FOP(27, FMT_D
),
8387 OPC_MIN_D
= FOP(28, FMT_D
),
8388 OPC_RECIP2_D
= FOP(28, FMT_D
),
8389 OPC_MINA_D
= FOP(29, FMT_D
),
8390 OPC_RECIP1_D
= FOP(29, FMT_D
),
8391 OPC_MAX_D
= FOP(30, FMT_D
),
8392 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8393 OPC_MAXA_D
= FOP(31, FMT_D
),
8394 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8395 OPC_CVT_S_D
= FOP(32, FMT_D
),
8396 OPC_CVT_W_D
= FOP(36, FMT_D
),
8397 OPC_CVT_L_D
= FOP(37, FMT_D
),
8398 OPC_CMP_F_D
= FOP (48, FMT_D
),
8399 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8400 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8401 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8402 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8403 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8404 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8405 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8406 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8407 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8408 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8409 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8410 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8411 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8412 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8413 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8415 OPC_CVT_S_W
= FOP(32, FMT_W
),
8416 OPC_CVT_D_W
= FOP(33, FMT_W
),
8417 OPC_CVT_S_L
= FOP(32, FMT_L
),
8418 OPC_CVT_D_L
= FOP(33, FMT_L
),
8419 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8421 OPC_ADD_PS
= FOP(0, FMT_PS
),
8422 OPC_SUB_PS
= FOP(1, FMT_PS
),
8423 OPC_MUL_PS
= FOP(2, FMT_PS
),
8424 OPC_DIV_PS
= FOP(3, FMT_PS
),
8425 OPC_ABS_PS
= FOP(5, FMT_PS
),
8426 OPC_MOV_PS
= FOP(6, FMT_PS
),
8427 OPC_NEG_PS
= FOP(7, FMT_PS
),
8428 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8429 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8430 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8431 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8432 OPC_MULR_PS
= FOP(26, FMT_PS
),
8433 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8434 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8435 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8436 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8438 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8439 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8440 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8441 OPC_PLL_PS
= FOP(44, FMT_PS
),
8442 OPC_PLU_PS
= FOP(45, FMT_PS
),
8443 OPC_PUL_PS
= FOP(46, FMT_PS
),
8444 OPC_PUU_PS
= FOP(47, FMT_PS
),
8445 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8446 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8447 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8448 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8449 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8450 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8451 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8452 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8453 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8454 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8455 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8456 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8457 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8458 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8459 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8460 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8464 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8465 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8466 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8467 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8468 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8469 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8470 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8471 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8472 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8473 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8474 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8475 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8476 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8477 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8478 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8479 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8480 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8481 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8482 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8483 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8484 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8485 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8487 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8488 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8489 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8490 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8491 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8492 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8493 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8494 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8495 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8496 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8497 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8498 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8499 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8500 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8501 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8502 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8503 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8504 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8505 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8506 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8507 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8508 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8510 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8512 TCGv t0
= tcg_temp_new();
8517 TCGv_i32 fp0
= tcg_temp_new_i32();
8519 gen_load_fpr32(ctx
, fp0
, fs
);
8520 tcg_gen_ext_i32_tl(t0
, fp0
);
8521 tcg_temp_free_i32(fp0
);
8523 gen_store_gpr(t0
, rt
);
8526 gen_load_gpr(t0
, rt
);
8528 TCGv_i32 fp0
= tcg_temp_new_i32();
8530 tcg_gen_trunc_tl_i32(fp0
, t0
);
8531 gen_store_fpr32(ctx
, fp0
, fs
);
8532 tcg_temp_free_i32(fp0
);
8536 gen_helper_1e0i(cfc1
, t0
, fs
);
8537 gen_store_gpr(t0
, rt
);
8540 gen_load_gpr(t0
, rt
);
8541 save_cpu_state(ctx
, 0);
8543 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8545 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8546 tcg_temp_free_i32(fs_tmp
);
8548 /* Stop translation as we may have changed hflags */
8549 ctx
->bstate
= BS_STOP
;
8551 #if defined(TARGET_MIPS64)
8553 gen_load_fpr64(ctx
, t0
, fs
);
8554 gen_store_gpr(t0
, rt
);
8557 gen_load_gpr(t0
, rt
);
8558 gen_store_fpr64(ctx
, t0
, fs
);
8563 TCGv_i32 fp0
= tcg_temp_new_i32();
8565 gen_load_fpr32h(ctx
, fp0
, fs
);
8566 tcg_gen_ext_i32_tl(t0
, fp0
);
8567 tcg_temp_free_i32(fp0
);
8569 gen_store_gpr(t0
, rt
);
8572 gen_load_gpr(t0
, rt
);
8574 TCGv_i32 fp0
= tcg_temp_new_i32();
8576 tcg_gen_trunc_tl_i32(fp0
, t0
);
8577 gen_store_fpr32h(ctx
, fp0
, fs
);
8578 tcg_temp_free_i32(fp0
);
8582 MIPS_INVAL("cp1 move");
8583 generate_exception_end(ctx
, EXCP_RI
);
8591 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8607 l1
= gen_new_label();
8608 t0
= tcg_temp_new_i32();
8609 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8610 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8611 tcg_temp_free_i32(t0
);
8613 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8615 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8620 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8624 TCGv_i32 t0
= tcg_temp_new_i32();
8625 TCGLabel
*l1
= gen_new_label();
8632 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8633 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8634 gen_load_fpr32(ctx
, t0
, fs
);
8635 gen_store_fpr32(ctx
, t0
, fd
);
8637 tcg_temp_free_i32(t0
);
8640 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8643 TCGv_i32 t0
= tcg_temp_new_i32();
8645 TCGLabel
*l1
= gen_new_label();
8652 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8653 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8654 tcg_temp_free_i32(t0
);
8655 fp0
= tcg_temp_new_i64();
8656 gen_load_fpr64(ctx
, fp0
, fs
);
8657 gen_store_fpr64(ctx
, fp0
, fd
);
8658 tcg_temp_free_i64(fp0
);
8662 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8666 TCGv_i32 t0
= tcg_temp_new_i32();
8667 TCGLabel
*l1
= gen_new_label();
8668 TCGLabel
*l2
= gen_new_label();
8675 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8676 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8677 gen_load_fpr32(ctx
, t0
, fs
);
8678 gen_store_fpr32(ctx
, t0
, fd
);
8681 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8682 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8683 gen_load_fpr32h(ctx
, t0
, fs
);
8684 gen_store_fpr32h(ctx
, t0
, fd
);
8685 tcg_temp_free_i32(t0
);
8689 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8692 TCGv_i32 t1
= tcg_const_i32(0);
8693 TCGv_i32 fp0
= tcg_temp_new_i32();
8694 TCGv_i32 fp1
= tcg_temp_new_i32();
8695 TCGv_i32 fp2
= tcg_temp_new_i32();
8696 gen_load_fpr32(ctx
, fp0
, fd
);
8697 gen_load_fpr32(ctx
, fp1
, ft
);
8698 gen_load_fpr32(ctx
, fp2
, fs
);
8702 tcg_gen_andi_i32(fp0
, fp0
, 1);
8703 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8706 tcg_gen_andi_i32(fp1
, fp1
, 1);
8707 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8710 tcg_gen_andi_i32(fp1
, fp1
, 1);
8711 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8714 MIPS_INVAL("gen_sel_s");
8715 generate_exception_end(ctx
, EXCP_RI
);
8719 gen_store_fpr32(ctx
, fp0
, fd
);
8720 tcg_temp_free_i32(fp2
);
8721 tcg_temp_free_i32(fp1
);
8722 tcg_temp_free_i32(fp0
);
8723 tcg_temp_free_i32(t1
);
/*
 * gen_sel_d: MIPS R6 double-precision register-based selects
 * (SEL.D / SELEQZ.D / SELNEZ.D) — 64-bit twin of gen_sel_s above,
 * implemented branch-free with movcond.
 *
 * NOTE(review): as with gen_sel_s, the switch(op1) header, case labels
 * and breaks were dropped in extraction; arm attribution below is
 * presumed — confirm against the upstream source.
 */
8726 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
/* Constant zero used as the movcond comparison operand. */
8729 TCGv_i64 t1
= tcg_const_i64(0);
/* fp0 = old fd (condition for SEL.D and result), fp1 = ft, fp2 = fs. */
8730 TCGv_i64 fp0
= tcg_temp_new_i64();
8731 TCGv_i64 fp1
= tcg_temp_new_i64();
8732 TCGv_i64 fp2
= tcg_temp_new_i64();
8733 gen_load_fpr64(ctx
, fp0
, fd
);
8734 gen_load_fpr64(ctx
, fp1
, ft
);
8735 gen_load_fpr64(ctx
, fp2
, fs
);
/* SEL.D arm: fp0 = (fd & 1) ? ft : fs. */
8739 tcg_gen_andi_i64(fp0
, fp0
, 1);
8740 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* SELEQZ.D arm: fp0 = (ft & 1) == 0 ? fs : 0. */
8743 tcg_gen_andi_i64(fp1
, fp1
, 1);
8744 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* SELNEZ.D arm: fp0 = (ft & 1) != 0 ? fs : 0. */
8747 tcg_gen_andi_i64(fp1
, fp1
, 1);
8748 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
/* default arm: unknown opcode -> reserved-instruction exception. */
8751 MIPS_INVAL("gen_sel_d");
8752 generate_exception_end(ctx
, EXCP_RI
);
/* Commit the selected value and release all temporaries. */
8756 gen_store_fpr64(ctx
, fp0
, fd
);
8757 tcg_temp_free_i64(fp2
);
8758 tcg_temp_free_i64(fp1
);
8759 tcg_temp_free_i64(fp0
);
8760 tcg_temp_free_i64(t1
);
8763 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8764 int ft
, int fs
, int fd
, int cc
)
8766 uint32_t func
= ctx
->opcode
& 0x3f;
8770 TCGv_i32 fp0
= tcg_temp_new_i32();
8771 TCGv_i32 fp1
= tcg_temp_new_i32();
8773 gen_load_fpr32(ctx
, fp0
, fs
);
8774 gen_load_fpr32(ctx
, fp1
, ft
);
8775 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8776 tcg_temp_free_i32(fp1
);
8777 gen_store_fpr32(ctx
, fp0
, fd
);
8778 tcg_temp_free_i32(fp0
);
8783 TCGv_i32 fp0
= tcg_temp_new_i32();
8784 TCGv_i32 fp1
= tcg_temp_new_i32();
8786 gen_load_fpr32(ctx
, fp0
, fs
);
8787 gen_load_fpr32(ctx
, fp1
, ft
);
8788 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8789 tcg_temp_free_i32(fp1
);
8790 gen_store_fpr32(ctx
, fp0
, fd
);
8791 tcg_temp_free_i32(fp0
);
8796 TCGv_i32 fp0
= tcg_temp_new_i32();
8797 TCGv_i32 fp1
= tcg_temp_new_i32();
8799 gen_load_fpr32(ctx
, fp0
, fs
);
8800 gen_load_fpr32(ctx
, fp1
, ft
);
8801 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8802 tcg_temp_free_i32(fp1
);
8803 gen_store_fpr32(ctx
, fp0
, fd
);
8804 tcg_temp_free_i32(fp0
);
8809 TCGv_i32 fp0
= tcg_temp_new_i32();
8810 TCGv_i32 fp1
= tcg_temp_new_i32();
8812 gen_load_fpr32(ctx
, fp0
, fs
);
8813 gen_load_fpr32(ctx
, fp1
, ft
);
8814 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8815 tcg_temp_free_i32(fp1
);
8816 gen_store_fpr32(ctx
, fp0
, fd
);
8817 tcg_temp_free_i32(fp0
);
8822 TCGv_i32 fp0
= tcg_temp_new_i32();
8824 gen_load_fpr32(ctx
, fp0
, fs
);
8825 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8826 gen_store_fpr32(ctx
, fp0
, fd
);
8827 tcg_temp_free_i32(fp0
);
8832 TCGv_i32 fp0
= tcg_temp_new_i32();
8834 gen_load_fpr32(ctx
, fp0
, fs
);
8835 gen_helper_float_abs_s(fp0
, fp0
);
8836 gen_store_fpr32(ctx
, fp0
, fd
);
8837 tcg_temp_free_i32(fp0
);
8842 TCGv_i32 fp0
= tcg_temp_new_i32();
8844 gen_load_fpr32(ctx
, fp0
, fs
);
8845 gen_store_fpr32(ctx
, fp0
, fd
);
8846 tcg_temp_free_i32(fp0
);
8851 TCGv_i32 fp0
= tcg_temp_new_i32();
8853 gen_load_fpr32(ctx
, fp0
, fs
);
8854 gen_helper_float_chs_s(fp0
, fp0
);
8855 gen_store_fpr32(ctx
, fp0
, fd
);
8856 tcg_temp_free_i32(fp0
);
8860 check_cp1_64bitmode(ctx
);
8862 TCGv_i32 fp32
= tcg_temp_new_i32();
8863 TCGv_i64 fp64
= tcg_temp_new_i64();
8865 gen_load_fpr32(ctx
, fp32
, fs
);
8866 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8867 tcg_temp_free_i32(fp32
);
8868 gen_store_fpr64(ctx
, fp64
, fd
);
8869 tcg_temp_free_i64(fp64
);
8873 check_cp1_64bitmode(ctx
);
8875 TCGv_i32 fp32
= tcg_temp_new_i32();
8876 TCGv_i64 fp64
= tcg_temp_new_i64();
8878 gen_load_fpr32(ctx
, fp32
, fs
);
8879 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8880 tcg_temp_free_i32(fp32
);
8881 gen_store_fpr64(ctx
, fp64
, fd
);
8882 tcg_temp_free_i64(fp64
);
8886 check_cp1_64bitmode(ctx
);
8888 TCGv_i32 fp32
= tcg_temp_new_i32();
8889 TCGv_i64 fp64
= tcg_temp_new_i64();
8891 gen_load_fpr32(ctx
, fp32
, fs
);
8892 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8893 tcg_temp_free_i32(fp32
);
8894 gen_store_fpr64(ctx
, fp64
, fd
);
8895 tcg_temp_free_i64(fp64
);
8899 check_cp1_64bitmode(ctx
);
8901 TCGv_i32 fp32
= tcg_temp_new_i32();
8902 TCGv_i64 fp64
= tcg_temp_new_i64();
8904 gen_load_fpr32(ctx
, fp32
, fs
);
8905 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8906 tcg_temp_free_i32(fp32
);
8907 gen_store_fpr64(ctx
, fp64
, fd
);
8908 tcg_temp_free_i64(fp64
);
8913 TCGv_i32 fp0
= tcg_temp_new_i32();
8915 gen_load_fpr32(ctx
, fp0
, fs
);
8916 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8917 gen_store_fpr32(ctx
, fp0
, fd
);
8918 tcg_temp_free_i32(fp0
);
8923 TCGv_i32 fp0
= tcg_temp_new_i32();
8925 gen_load_fpr32(ctx
, fp0
, fs
);
8926 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8927 gen_store_fpr32(ctx
, fp0
, fd
);
8928 tcg_temp_free_i32(fp0
);
8933 TCGv_i32 fp0
= tcg_temp_new_i32();
8935 gen_load_fpr32(ctx
, fp0
, fs
);
8936 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8937 gen_store_fpr32(ctx
, fp0
, fd
);
8938 tcg_temp_free_i32(fp0
);
8943 TCGv_i32 fp0
= tcg_temp_new_i32();
8945 gen_load_fpr32(ctx
, fp0
, fs
);
8946 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8947 gen_store_fpr32(ctx
, fp0
, fd
);
8948 tcg_temp_free_i32(fp0
);
8952 check_insn(ctx
, ISA_MIPS32R6
);
8953 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8956 check_insn(ctx
, ISA_MIPS32R6
);
8957 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8960 check_insn(ctx
, ISA_MIPS32R6
);
8961 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8964 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8965 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8968 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8970 TCGLabel
*l1
= gen_new_label();
8974 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8976 fp0
= tcg_temp_new_i32();
8977 gen_load_fpr32(ctx
, fp0
, fs
);
8978 gen_store_fpr32(ctx
, fp0
, fd
);
8979 tcg_temp_free_i32(fp0
);
8984 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8986 TCGLabel
*l1
= gen_new_label();
8990 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8991 fp0
= tcg_temp_new_i32();
8992 gen_load_fpr32(ctx
, fp0
, fs
);
8993 gen_store_fpr32(ctx
, fp0
, fd
);
8994 tcg_temp_free_i32(fp0
);
9001 TCGv_i32 fp0
= tcg_temp_new_i32();
9003 gen_load_fpr32(ctx
, fp0
, fs
);
9004 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9005 gen_store_fpr32(ctx
, fp0
, fd
);
9006 tcg_temp_free_i32(fp0
);
9011 TCGv_i32 fp0
= tcg_temp_new_i32();
9013 gen_load_fpr32(ctx
, fp0
, fs
);
9014 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9015 gen_store_fpr32(ctx
, fp0
, fd
);
9016 tcg_temp_free_i32(fp0
);
9020 check_insn(ctx
, ISA_MIPS32R6
);
9022 TCGv_i32 fp0
= tcg_temp_new_i32();
9023 TCGv_i32 fp1
= tcg_temp_new_i32();
9024 TCGv_i32 fp2
= tcg_temp_new_i32();
9025 gen_load_fpr32(ctx
, fp0
, fs
);
9026 gen_load_fpr32(ctx
, fp1
, ft
);
9027 gen_load_fpr32(ctx
, fp2
, fd
);
9028 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9029 gen_store_fpr32(ctx
, fp2
, fd
);
9030 tcg_temp_free_i32(fp2
);
9031 tcg_temp_free_i32(fp1
);
9032 tcg_temp_free_i32(fp0
);
9036 check_insn(ctx
, ISA_MIPS32R6
);
9038 TCGv_i32 fp0
= tcg_temp_new_i32();
9039 TCGv_i32 fp1
= tcg_temp_new_i32();
9040 TCGv_i32 fp2
= tcg_temp_new_i32();
9041 gen_load_fpr32(ctx
, fp0
, fs
);
9042 gen_load_fpr32(ctx
, fp1
, ft
);
9043 gen_load_fpr32(ctx
, fp2
, fd
);
9044 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9045 gen_store_fpr32(ctx
, fp2
, fd
);
9046 tcg_temp_free_i32(fp2
);
9047 tcg_temp_free_i32(fp1
);
9048 tcg_temp_free_i32(fp0
);
9052 check_insn(ctx
, ISA_MIPS32R6
);
9054 TCGv_i32 fp0
= tcg_temp_new_i32();
9055 gen_load_fpr32(ctx
, fp0
, fs
);
9056 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9057 gen_store_fpr32(ctx
, fp0
, fd
);
9058 tcg_temp_free_i32(fp0
);
9062 check_insn(ctx
, ISA_MIPS32R6
);
9064 TCGv_i32 fp0
= tcg_temp_new_i32();
9065 gen_load_fpr32(ctx
, fp0
, fs
);
9066 gen_helper_float_class_s(fp0
, fp0
);
9067 gen_store_fpr32(ctx
, fp0
, fd
);
9068 tcg_temp_free_i32(fp0
);
9071 case OPC_MIN_S
: /* OPC_RECIP2_S */
9072 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9074 TCGv_i32 fp0
= tcg_temp_new_i32();
9075 TCGv_i32 fp1
= tcg_temp_new_i32();
9076 TCGv_i32 fp2
= tcg_temp_new_i32();
9077 gen_load_fpr32(ctx
, fp0
, fs
);
9078 gen_load_fpr32(ctx
, fp1
, ft
);
9079 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9080 gen_store_fpr32(ctx
, fp2
, fd
);
9081 tcg_temp_free_i32(fp2
);
9082 tcg_temp_free_i32(fp1
);
9083 tcg_temp_free_i32(fp0
);
9086 check_cp1_64bitmode(ctx
);
9088 TCGv_i32 fp0
= tcg_temp_new_i32();
9089 TCGv_i32 fp1
= tcg_temp_new_i32();
9091 gen_load_fpr32(ctx
, fp0
, fs
);
9092 gen_load_fpr32(ctx
, fp1
, ft
);
9093 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9094 tcg_temp_free_i32(fp1
);
9095 gen_store_fpr32(ctx
, fp0
, fd
);
9096 tcg_temp_free_i32(fp0
);
9100 case OPC_MINA_S
: /* OPC_RECIP1_S */
9101 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9103 TCGv_i32 fp0
= tcg_temp_new_i32();
9104 TCGv_i32 fp1
= tcg_temp_new_i32();
9105 TCGv_i32 fp2
= tcg_temp_new_i32();
9106 gen_load_fpr32(ctx
, fp0
, fs
);
9107 gen_load_fpr32(ctx
, fp1
, ft
);
9108 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9109 gen_store_fpr32(ctx
, fp2
, fd
);
9110 tcg_temp_free_i32(fp2
);
9111 tcg_temp_free_i32(fp1
);
9112 tcg_temp_free_i32(fp0
);
9115 check_cp1_64bitmode(ctx
);
9117 TCGv_i32 fp0
= tcg_temp_new_i32();
9119 gen_load_fpr32(ctx
, fp0
, fs
);
9120 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9121 gen_store_fpr32(ctx
, fp0
, fd
);
9122 tcg_temp_free_i32(fp0
);
9126 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9127 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9129 TCGv_i32 fp0
= tcg_temp_new_i32();
9130 TCGv_i32 fp1
= tcg_temp_new_i32();
9131 gen_load_fpr32(ctx
, fp0
, fs
);
9132 gen_load_fpr32(ctx
, fp1
, ft
);
9133 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9134 gen_store_fpr32(ctx
, fp1
, fd
);
9135 tcg_temp_free_i32(fp1
);
9136 tcg_temp_free_i32(fp0
);
9139 check_cp1_64bitmode(ctx
);
9141 TCGv_i32 fp0
= tcg_temp_new_i32();
9143 gen_load_fpr32(ctx
, fp0
, fs
);
9144 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9145 gen_store_fpr32(ctx
, fp0
, fd
);
9146 tcg_temp_free_i32(fp0
);
9150 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9151 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9153 TCGv_i32 fp0
= tcg_temp_new_i32();
9154 TCGv_i32 fp1
= tcg_temp_new_i32();
9155 gen_load_fpr32(ctx
, fp0
, fs
);
9156 gen_load_fpr32(ctx
, fp1
, ft
);
9157 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9158 gen_store_fpr32(ctx
, fp1
, fd
);
9159 tcg_temp_free_i32(fp1
);
9160 tcg_temp_free_i32(fp0
);
9163 check_cp1_64bitmode(ctx
);
9165 TCGv_i32 fp0
= tcg_temp_new_i32();
9166 TCGv_i32 fp1
= tcg_temp_new_i32();
9168 gen_load_fpr32(ctx
, fp0
, fs
);
9169 gen_load_fpr32(ctx
, fp1
, ft
);
9170 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9171 tcg_temp_free_i32(fp1
);
9172 gen_store_fpr32(ctx
, fp0
, fd
);
9173 tcg_temp_free_i32(fp0
);
9178 check_cp1_registers(ctx
, fd
);
9180 TCGv_i32 fp32
= tcg_temp_new_i32();
9181 TCGv_i64 fp64
= tcg_temp_new_i64();
9183 gen_load_fpr32(ctx
, fp32
, fs
);
9184 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9185 tcg_temp_free_i32(fp32
);
9186 gen_store_fpr64(ctx
, fp64
, fd
);
9187 tcg_temp_free_i64(fp64
);
9192 TCGv_i32 fp0
= tcg_temp_new_i32();
9194 gen_load_fpr32(ctx
, fp0
, fs
);
9195 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9196 gen_store_fpr32(ctx
, fp0
, fd
);
9197 tcg_temp_free_i32(fp0
);
9201 check_cp1_64bitmode(ctx
);
9203 TCGv_i32 fp32
= tcg_temp_new_i32();
9204 TCGv_i64 fp64
= tcg_temp_new_i64();
9206 gen_load_fpr32(ctx
, fp32
, fs
);
9207 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9208 tcg_temp_free_i32(fp32
);
9209 gen_store_fpr64(ctx
, fp64
, fd
);
9210 tcg_temp_free_i64(fp64
);
9216 TCGv_i64 fp64
= tcg_temp_new_i64();
9217 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9218 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9220 gen_load_fpr32(ctx
, fp32_0
, fs
);
9221 gen_load_fpr32(ctx
, fp32_1
, ft
);
9222 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9223 tcg_temp_free_i32(fp32_1
);
9224 tcg_temp_free_i32(fp32_0
);
9225 gen_store_fpr64(ctx
, fp64
, fd
);
9226 tcg_temp_free_i64(fp64
);
9238 case OPC_CMP_NGLE_S
:
9245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9246 if (ctx
->opcode
& (1 << 6)) {
9247 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9249 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9253 check_cp1_registers(ctx
, fs
| ft
| fd
);
9255 TCGv_i64 fp0
= tcg_temp_new_i64();
9256 TCGv_i64 fp1
= tcg_temp_new_i64();
9258 gen_load_fpr64(ctx
, fp0
, fs
);
9259 gen_load_fpr64(ctx
, fp1
, ft
);
9260 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9261 tcg_temp_free_i64(fp1
);
9262 gen_store_fpr64(ctx
, fp0
, fd
);
9263 tcg_temp_free_i64(fp0
);
9267 check_cp1_registers(ctx
, fs
| ft
| fd
);
9269 TCGv_i64 fp0
= tcg_temp_new_i64();
9270 TCGv_i64 fp1
= tcg_temp_new_i64();
9272 gen_load_fpr64(ctx
, fp0
, fs
);
9273 gen_load_fpr64(ctx
, fp1
, ft
);
9274 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9275 tcg_temp_free_i64(fp1
);
9276 gen_store_fpr64(ctx
, fp0
, fd
);
9277 tcg_temp_free_i64(fp0
);
9281 check_cp1_registers(ctx
, fs
| ft
| fd
);
9283 TCGv_i64 fp0
= tcg_temp_new_i64();
9284 TCGv_i64 fp1
= tcg_temp_new_i64();
9286 gen_load_fpr64(ctx
, fp0
, fs
);
9287 gen_load_fpr64(ctx
, fp1
, ft
);
9288 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9289 tcg_temp_free_i64(fp1
);
9290 gen_store_fpr64(ctx
, fp0
, fd
);
9291 tcg_temp_free_i64(fp0
);
9295 check_cp1_registers(ctx
, fs
| ft
| fd
);
9297 TCGv_i64 fp0
= tcg_temp_new_i64();
9298 TCGv_i64 fp1
= tcg_temp_new_i64();
9300 gen_load_fpr64(ctx
, fp0
, fs
);
9301 gen_load_fpr64(ctx
, fp1
, ft
);
9302 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9303 tcg_temp_free_i64(fp1
);
9304 gen_store_fpr64(ctx
, fp0
, fd
);
9305 tcg_temp_free_i64(fp0
);
9309 check_cp1_registers(ctx
, fs
| fd
);
9311 TCGv_i64 fp0
= tcg_temp_new_i64();
9313 gen_load_fpr64(ctx
, fp0
, fs
);
9314 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9315 gen_store_fpr64(ctx
, fp0
, fd
);
9316 tcg_temp_free_i64(fp0
);
9320 check_cp1_registers(ctx
, fs
| fd
);
9322 TCGv_i64 fp0
= tcg_temp_new_i64();
9324 gen_load_fpr64(ctx
, fp0
, fs
);
9325 gen_helper_float_abs_d(fp0
, fp0
);
9326 gen_store_fpr64(ctx
, fp0
, fd
);
9327 tcg_temp_free_i64(fp0
);
9331 check_cp1_registers(ctx
, fs
| fd
);
9333 TCGv_i64 fp0
= tcg_temp_new_i64();
9335 gen_load_fpr64(ctx
, fp0
, fs
);
9336 gen_store_fpr64(ctx
, fp0
, fd
);
9337 tcg_temp_free_i64(fp0
);
9341 check_cp1_registers(ctx
, fs
| fd
);
9343 TCGv_i64 fp0
= tcg_temp_new_i64();
9345 gen_load_fpr64(ctx
, fp0
, fs
);
9346 gen_helper_float_chs_d(fp0
, fp0
);
9347 gen_store_fpr64(ctx
, fp0
, fd
);
9348 tcg_temp_free_i64(fp0
);
9352 check_cp1_64bitmode(ctx
);
9354 TCGv_i64 fp0
= tcg_temp_new_i64();
9356 gen_load_fpr64(ctx
, fp0
, fs
);
9357 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9358 gen_store_fpr64(ctx
, fp0
, fd
);
9359 tcg_temp_free_i64(fp0
);
9363 check_cp1_64bitmode(ctx
);
9365 TCGv_i64 fp0
= tcg_temp_new_i64();
9367 gen_load_fpr64(ctx
, fp0
, fs
);
9368 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9369 gen_store_fpr64(ctx
, fp0
, fd
);
9370 tcg_temp_free_i64(fp0
);
9374 check_cp1_64bitmode(ctx
);
9376 TCGv_i64 fp0
= tcg_temp_new_i64();
9378 gen_load_fpr64(ctx
, fp0
, fs
);
9379 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9380 gen_store_fpr64(ctx
, fp0
, fd
);
9381 tcg_temp_free_i64(fp0
);
9385 check_cp1_64bitmode(ctx
);
9387 TCGv_i64 fp0
= tcg_temp_new_i64();
9389 gen_load_fpr64(ctx
, fp0
, fs
);
9390 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9391 gen_store_fpr64(ctx
, fp0
, fd
);
9392 tcg_temp_free_i64(fp0
);
9396 check_cp1_registers(ctx
, fs
);
9398 TCGv_i32 fp32
= tcg_temp_new_i32();
9399 TCGv_i64 fp64
= tcg_temp_new_i64();
9401 gen_load_fpr64(ctx
, fp64
, fs
);
9402 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9403 tcg_temp_free_i64(fp64
);
9404 gen_store_fpr32(ctx
, fp32
, fd
);
9405 tcg_temp_free_i32(fp32
);
9409 check_cp1_registers(ctx
, fs
);
9411 TCGv_i32 fp32
= tcg_temp_new_i32();
9412 TCGv_i64 fp64
= tcg_temp_new_i64();
9414 gen_load_fpr64(ctx
, fp64
, fs
);
9415 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9416 tcg_temp_free_i64(fp64
);
9417 gen_store_fpr32(ctx
, fp32
, fd
);
9418 tcg_temp_free_i32(fp32
);
9422 check_cp1_registers(ctx
, fs
);
9424 TCGv_i32 fp32
= tcg_temp_new_i32();
9425 TCGv_i64 fp64
= tcg_temp_new_i64();
9427 gen_load_fpr64(ctx
, fp64
, fs
);
9428 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9429 tcg_temp_free_i64(fp64
);
9430 gen_store_fpr32(ctx
, fp32
, fd
);
9431 tcg_temp_free_i32(fp32
);
9435 check_cp1_registers(ctx
, fs
);
9437 TCGv_i32 fp32
= tcg_temp_new_i32();
9438 TCGv_i64 fp64
= tcg_temp_new_i64();
9440 gen_load_fpr64(ctx
, fp64
, fs
);
9441 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9442 tcg_temp_free_i64(fp64
);
9443 gen_store_fpr32(ctx
, fp32
, fd
);
9444 tcg_temp_free_i32(fp32
);
9448 check_insn(ctx
, ISA_MIPS32R6
);
9449 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9452 check_insn(ctx
, ISA_MIPS32R6
);
9453 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9456 check_insn(ctx
, ISA_MIPS32R6
);
9457 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9460 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9461 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9464 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9466 TCGLabel
*l1
= gen_new_label();
9470 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9472 fp0
= tcg_temp_new_i64();
9473 gen_load_fpr64(ctx
, fp0
, fs
);
9474 gen_store_fpr64(ctx
, fp0
, fd
);
9475 tcg_temp_free_i64(fp0
);
9480 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9482 TCGLabel
*l1
= gen_new_label();
9486 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9487 fp0
= tcg_temp_new_i64();
9488 gen_load_fpr64(ctx
, fp0
, fs
);
9489 gen_store_fpr64(ctx
, fp0
, fd
);
9490 tcg_temp_free_i64(fp0
);
9496 check_cp1_registers(ctx
, fs
| fd
);
9498 TCGv_i64 fp0
= tcg_temp_new_i64();
9500 gen_load_fpr64(ctx
, fp0
, fs
);
9501 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9502 gen_store_fpr64(ctx
, fp0
, fd
);
9503 tcg_temp_free_i64(fp0
);
9507 check_cp1_registers(ctx
, fs
| fd
);
9509 TCGv_i64 fp0
= tcg_temp_new_i64();
9511 gen_load_fpr64(ctx
, fp0
, fs
);
9512 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9513 gen_store_fpr64(ctx
, fp0
, fd
);
9514 tcg_temp_free_i64(fp0
);
9518 check_insn(ctx
, ISA_MIPS32R6
);
9520 TCGv_i64 fp0
= tcg_temp_new_i64();
9521 TCGv_i64 fp1
= tcg_temp_new_i64();
9522 TCGv_i64 fp2
= tcg_temp_new_i64();
9523 gen_load_fpr64(ctx
, fp0
, fs
);
9524 gen_load_fpr64(ctx
, fp1
, ft
);
9525 gen_load_fpr64(ctx
, fp2
, fd
);
9526 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9527 gen_store_fpr64(ctx
, fp2
, fd
);
9528 tcg_temp_free_i64(fp2
);
9529 tcg_temp_free_i64(fp1
);
9530 tcg_temp_free_i64(fp0
);
9534 check_insn(ctx
, ISA_MIPS32R6
);
9536 TCGv_i64 fp0
= tcg_temp_new_i64();
9537 TCGv_i64 fp1
= tcg_temp_new_i64();
9538 TCGv_i64 fp2
= tcg_temp_new_i64();
9539 gen_load_fpr64(ctx
, fp0
, fs
);
9540 gen_load_fpr64(ctx
, fp1
, ft
);
9541 gen_load_fpr64(ctx
, fp2
, fd
);
9542 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9543 gen_store_fpr64(ctx
, fp2
, fd
);
9544 tcg_temp_free_i64(fp2
);
9545 tcg_temp_free_i64(fp1
);
9546 tcg_temp_free_i64(fp0
);
9550 check_insn(ctx
, ISA_MIPS32R6
);
9552 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 gen_load_fpr64(ctx
, fp0
, fs
);
9554 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9555 gen_store_fpr64(ctx
, fp0
, fd
);
9556 tcg_temp_free_i64(fp0
);
9560 check_insn(ctx
, ISA_MIPS32R6
);
9562 TCGv_i64 fp0
= tcg_temp_new_i64();
9563 gen_load_fpr64(ctx
, fp0
, fs
);
9564 gen_helper_float_class_d(fp0
, fp0
);
9565 gen_store_fpr64(ctx
, fp0
, fd
);
9566 tcg_temp_free_i64(fp0
);
9569 case OPC_MIN_D
: /* OPC_RECIP2_D */
9570 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9572 TCGv_i64 fp0
= tcg_temp_new_i64();
9573 TCGv_i64 fp1
= tcg_temp_new_i64();
9574 gen_load_fpr64(ctx
, fp0
, fs
);
9575 gen_load_fpr64(ctx
, fp1
, ft
);
9576 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9577 gen_store_fpr64(ctx
, fp1
, fd
);
9578 tcg_temp_free_i64(fp1
);
9579 tcg_temp_free_i64(fp0
);
9582 check_cp1_64bitmode(ctx
);
9584 TCGv_i64 fp0
= tcg_temp_new_i64();
9585 TCGv_i64 fp1
= tcg_temp_new_i64();
9587 gen_load_fpr64(ctx
, fp0
, fs
);
9588 gen_load_fpr64(ctx
, fp1
, ft
);
9589 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9590 tcg_temp_free_i64(fp1
);
9591 gen_store_fpr64(ctx
, fp0
, fd
);
9592 tcg_temp_free_i64(fp0
);
9596 case OPC_MINA_D
: /* OPC_RECIP1_D */
9597 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9599 TCGv_i64 fp0
= tcg_temp_new_i64();
9600 TCGv_i64 fp1
= tcg_temp_new_i64();
9601 gen_load_fpr64(ctx
, fp0
, fs
);
9602 gen_load_fpr64(ctx
, fp1
, ft
);
9603 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9604 gen_store_fpr64(ctx
, fp1
, fd
);
9605 tcg_temp_free_i64(fp1
);
9606 tcg_temp_free_i64(fp0
);
9609 check_cp1_64bitmode(ctx
);
9611 TCGv_i64 fp0
= tcg_temp_new_i64();
9613 gen_load_fpr64(ctx
, fp0
, fs
);
9614 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9615 gen_store_fpr64(ctx
, fp0
, fd
);
9616 tcg_temp_free_i64(fp0
);
9620 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9621 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9623 TCGv_i64 fp0
= tcg_temp_new_i64();
9624 TCGv_i64 fp1
= tcg_temp_new_i64();
9625 gen_load_fpr64(ctx
, fp0
, fs
);
9626 gen_load_fpr64(ctx
, fp1
, ft
);
9627 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9628 gen_store_fpr64(ctx
, fp1
, fd
);
9629 tcg_temp_free_i64(fp1
);
9630 tcg_temp_free_i64(fp0
);
9633 check_cp1_64bitmode(ctx
);
9635 TCGv_i64 fp0
= tcg_temp_new_i64();
9637 gen_load_fpr64(ctx
, fp0
, fs
);
9638 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9639 gen_store_fpr64(ctx
, fp0
, fd
);
9640 tcg_temp_free_i64(fp0
);
9644 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9645 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9647 TCGv_i64 fp0
= tcg_temp_new_i64();
9648 TCGv_i64 fp1
= tcg_temp_new_i64();
9649 gen_load_fpr64(ctx
, fp0
, fs
);
9650 gen_load_fpr64(ctx
, fp1
, ft
);
9651 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9652 gen_store_fpr64(ctx
, fp1
, fd
);
9653 tcg_temp_free_i64(fp1
);
9654 tcg_temp_free_i64(fp0
);
9657 check_cp1_64bitmode(ctx
);
9659 TCGv_i64 fp0
= tcg_temp_new_i64();
9660 TCGv_i64 fp1
= tcg_temp_new_i64();
9662 gen_load_fpr64(ctx
, fp0
, fs
);
9663 gen_load_fpr64(ctx
, fp1
, ft
);
9664 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9665 tcg_temp_free_i64(fp1
);
9666 gen_store_fpr64(ctx
, fp0
, fd
);
9667 tcg_temp_free_i64(fp0
);
9680 case OPC_CMP_NGLE_D
:
9687 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9688 if (ctx
->opcode
& (1 << 6)) {
9689 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9691 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9695 check_cp1_registers(ctx
, fs
);
9697 TCGv_i32 fp32
= tcg_temp_new_i32();
9698 TCGv_i64 fp64
= tcg_temp_new_i64();
9700 gen_load_fpr64(ctx
, fp64
, fs
);
9701 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9702 tcg_temp_free_i64(fp64
);
9703 gen_store_fpr32(ctx
, fp32
, fd
);
9704 tcg_temp_free_i32(fp32
);
9708 check_cp1_registers(ctx
, fs
);
9710 TCGv_i32 fp32
= tcg_temp_new_i32();
9711 TCGv_i64 fp64
= tcg_temp_new_i64();
9713 gen_load_fpr64(ctx
, fp64
, fs
);
9714 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9715 tcg_temp_free_i64(fp64
);
9716 gen_store_fpr32(ctx
, fp32
, fd
);
9717 tcg_temp_free_i32(fp32
);
9721 check_cp1_64bitmode(ctx
);
9723 TCGv_i64 fp0
= tcg_temp_new_i64();
9725 gen_load_fpr64(ctx
, fp0
, fs
);
9726 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9727 gen_store_fpr64(ctx
, fp0
, fd
);
9728 tcg_temp_free_i64(fp0
);
9733 TCGv_i32 fp0
= tcg_temp_new_i32();
9735 gen_load_fpr32(ctx
, fp0
, fs
);
9736 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9737 gen_store_fpr32(ctx
, fp0
, fd
);
9738 tcg_temp_free_i32(fp0
);
9742 check_cp1_registers(ctx
, fd
);
9744 TCGv_i32 fp32
= tcg_temp_new_i32();
9745 TCGv_i64 fp64
= tcg_temp_new_i64();
9747 gen_load_fpr32(ctx
, fp32
, fs
);
9748 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9749 tcg_temp_free_i32(fp32
);
9750 gen_store_fpr64(ctx
, fp64
, fd
);
9751 tcg_temp_free_i64(fp64
);
9755 check_cp1_64bitmode(ctx
);
9757 TCGv_i32 fp32
= tcg_temp_new_i32();
9758 TCGv_i64 fp64
= tcg_temp_new_i64();
9760 gen_load_fpr64(ctx
, fp64
, fs
);
9761 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9762 tcg_temp_free_i64(fp64
);
9763 gen_store_fpr32(ctx
, fp32
, fd
);
9764 tcg_temp_free_i32(fp32
);
9768 check_cp1_64bitmode(ctx
);
9770 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 gen_load_fpr64(ctx
, fp0
, fs
);
9773 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9774 gen_store_fpr64(ctx
, fp0
, fd
);
9775 tcg_temp_free_i64(fp0
);
9781 TCGv_i64 fp0
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9785 gen_store_fpr64(ctx
, fp0
, fd
);
9786 tcg_temp_free_i64(fp0
);
9792 TCGv_i64 fp0
= tcg_temp_new_i64();
9793 TCGv_i64 fp1
= tcg_temp_new_i64();
9795 gen_load_fpr64(ctx
, fp0
, fs
);
9796 gen_load_fpr64(ctx
, fp1
, ft
);
9797 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9798 tcg_temp_free_i64(fp1
);
9799 gen_store_fpr64(ctx
, fp0
, fd
);
9800 tcg_temp_free_i64(fp0
);
9806 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 TCGv_i64 fp1
= tcg_temp_new_i64();
9809 gen_load_fpr64(ctx
, fp0
, fs
);
9810 gen_load_fpr64(ctx
, fp1
, ft
);
9811 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9812 tcg_temp_free_i64(fp1
);
9813 gen_store_fpr64(ctx
, fp0
, fd
);
9814 tcg_temp_free_i64(fp0
);
9820 TCGv_i64 fp0
= tcg_temp_new_i64();
9821 TCGv_i64 fp1
= tcg_temp_new_i64();
9823 gen_load_fpr64(ctx
, fp0
, fs
);
9824 gen_load_fpr64(ctx
, fp1
, ft
);
9825 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9826 tcg_temp_free_i64(fp1
);
9827 gen_store_fpr64(ctx
, fp0
, fd
);
9828 tcg_temp_free_i64(fp0
);
9834 TCGv_i64 fp0
= tcg_temp_new_i64();
9836 gen_load_fpr64(ctx
, fp0
, fs
);
9837 gen_helper_float_abs_ps(fp0
, fp0
);
9838 gen_store_fpr64(ctx
, fp0
, fd
);
9839 tcg_temp_free_i64(fp0
);
9845 TCGv_i64 fp0
= tcg_temp_new_i64();
9847 gen_load_fpr64(ctx
, fp0
, fs
);
9848 gen_store_fpr64(ctx
, fp0
, fd
);
9849 tcg_temp_free_i64(fp0
);
9855 TCGv_i64 fp0
= tcg_temp_new_i64();
9857 gen_load_fpr64(ctx
, fp0
, fs
);
9858 gen_helper_float_chs_ps(fp0
, fp0
);
9859 gen_store_fpr64(ctx
, fp0
, fd
);
9860 tcg_temp_free_i64(fp0
);
9865 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9870 TCGLabel
*l1
= gen_new_label();
9874 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9875 fp0
= tcg_temp_new_i64();
9876 gen_load_fpr64(ctx
, fp0
, fs
);
9877 gen_store_fpr64(ctx
, fp0
, fd
);
9878 tcg_temp_free_i64(fp0
);
9885 TCGLabel
*l1
= gen_new_label();
9889 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9890 fp0
= tcg_temp_new_i64();
9891 gen_load_fpr64(ctx
, fp0
, fs
);
9892 gen_store_fpr64(ctx
, fp0
, fd
);
9893 tcg_temp_free_i64(fp0
);
9901 TCGv_i64 fp0
= tcg_temp_new_i64();
9902 TCGv_i64 fp1
= tcg_temp_new_i64();
9904 gen_load_fpr64(ctx
, fp0
, ft
);
9905 gen_load_fpr64(ctx
, fp1
, fs
);
9906 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9907 tcg_temp_free_i64(fp1
);
9908 gen_store_fpr64(ctx
, fp0
, fd
);
9909 tcg_temp_free_i64(fp0
);
9915 TCGv_i64 fp0
= tcg_temp_new_i64();
9916 TCGv_i64 fp1
= tcg_temp_new_i64();
9918 gen_load_fpr64(ctx
, fp0
, ft
);
9919 gen_load_fpr64(ctx
, fp1
, fs
);
9920 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9921 tcg_temp_free_i64(fp1
);
9922 gen_store_fpr64(ctx
, fp0
, fd
);
9923 tcg_temp_free_i64(fp0
);
9929 TCGv_i64 fp0
= tcg_temp_new_i64();
9930 TCGv_i64 fp1
= tcg_temp_new_i64();
9932 gen_load_fpr64(ctx
, fp0
, fs
);
9933 gen_load_fpr64(ctx
, fp1
, ft
);
9934 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9935 tcg_temp_free_i64(fp1
);
9936 gen_store_fpr64(ctx
, fp0
, fd
);
9937 tcg_temp_free_i64(fp0
);
9943 TCGv_i64 fp0
= tcg_temp_new_i64();
9945 gen_load_fpr64(ctx
, fp0
, fs
);
9946 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9947 gen_store_fpr64(ctx
, fp0
, fd
);
9948 tcg_temp_free_i64(fp0
);
9954 TCGv_i64 fp0
= tcg_temp_new_i64();
9956 gen_load_fpr64(ctx
, fp0
, fs
);
9957 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9958 gen_store_fpr64(ctx
, fp0
, fd
);
9959 tcg_temp_free_i64(fp0
);
9965 TCGv_i64 fp0
= tcg_temp_new_i64();
9966 TCGv_i64 fp1
= tcg_temp_new_i64();
9968 gen_load_fpr64(ctx
, fp0
, fs
);
9969 gen_load_fpr64(ctx
, fp1
, ft
);
9970 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9971 tcg_temp_free_i64(fp1
);
9972 gen_store_fpr64(ctx
, fp0
, fd
);
9973 tcg_temp_free_i64(fp0
);
9977 check_cp1_64bitmode(ctx
);
9979 TCGv_i32 fp0
= tcg_temp_new_i32();
9981 gen_load_fpr32h(ctx
, fp0
, fs
);
9982 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9983 gen_store_fpr32(ctx
, fp0
, fd
);
9984 tcg_temp_free_i32(fp0
);
9990 TCGv_i64 fp0
= tcg_temp_new_i64();
9992 gen_load_fpr64(ctx
, fp0
, fs
);
9993 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
9999 check_cp1_64bitmode(ctx
);
10001 TCGv_i32 fp0
= tcg_temp_new_i32();
10003 gen_load_fpr32(ctx
, fp0
, fs
);
10004 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10005 gen_store_fpr32(ctx
, fp0
, fd
);
10006 tcg_temp_free_i32(fp0
);
10012 TCGv_i32 fp0
= tcg_temp_new_i32();
10013 TCGv_i32 fp1
= tcg_temp_new_i32();
10015 gen_load_fpr32(ctx
, fp0
, fs
);
10016 gen_load_fpr32(ctx
, fp1
, ft
);
10017 gen_store_fpr32h(ctx
, fp0
, fd
);
10018 gen_store_fpr32(ctx
, fp1
, fd
);
10019 tcg_temp_free_i32(fp0
);
10020 tcg_temp_free_i32(fp1
);
10026 TCGv_i32 fp0
= tcg_temp_new_i32();
10027 TCGv_i32 fp1
= tcg_temp_new_i32();
10029 gen_load_fpr32(ctx
, fp0
, fs
);
10030 gen_load_fpr32h(ctx
, fp1
, ft
);
10031 gen_store_fpr32(ctx
, fp1
, fd
);
10032 gen_store_fpr32h(ctx
, fp0
, fd
);
10033 tcg_temp_free_i32(fp0
);
10034 tcg_temp_free_i32(fp1
);
10040 TCGv_i32 fp0
= tcg_temp_new_i32();
10041 TCGv_i32 fp1
= tcg_temp_new_i32();
10043 gen_load_fpr32h(ctx
, fp0
, fs
);
10044 gen_load_fpr32(ctx
, fp1
, ft
);
10045 gen_store_fpr32(ctx
, fp1
, fd
);
10046 gen_store_fpr32h(ctx
, fp0
, fd
);
10047 tcg_temp_free_i32(fp0
);
10048 tcg_temp_free_i32(fp1
);
10054 TCGv_i32 fp0
= tcg_temp_new_i32();
10055 TCGv_i32 fp1
= tcg_temp_new_i32();
10057 gen_load_fpr32h(ctx
, fp0
, fs
);
10058 gen_load_fpr32h(ctx
, fp1
, ft
);
10059 gen_store_fpr32(ctx
, fp1
, fd
);
10060 gen_store_fpr32h(ctx
, fp0
, fd
);
10061 tcg_temp_free_i32(fp0
);
10062 tcg_temp_free_i32(fp1
);
10066 case OPC_CMP_UN_PS
:
10067 case OPC_CMP_EQ_PS
:
10068 case OPC_CMP_UEQ_PS
:
10069 case OPC_CMP_OLT_PS
:
10070 case OPC_CMP_ULT_PS
:
10071 case OPC_CMP_OLE_PS
:
10072 case OPC_CMP_ULE_PS
:
10073 case OPC_CMP_SF_PS
:
10074 case OPC_CMP_NGLE_PS
:
10075 case OPC_CMP_SEQ_PS
:
10076 case OPC_CMP_NGL_PS
:
10077 case OPC_CMP_LT_PS
:
10078 case OPC_CMP_NGE_PS
:
10079 case OPC_CMP_LE_PS
:
10080 case OPC_CMP_NGT_PS
:
10081 if (ctx
->opcode
& (1 << 6)) {
10082 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10084 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10088 MIPS_INVAL("farith");
10089 generate_exception_end(ctx
, EXCP_RI
);
10094 /* Coprocessor 3 (FPU) */
/*
 * gen_flt3_ldst: COP1X register-indexed FP loads/stores
 * (LWXC1/LDXC1/LUXC1/SWXC1/SDXC1/SUXC1): effective address is
 * GPR[base] + GPR[index], with the $zero special cases folded away.
 *
 * NOTE(review): extraction dropped the switch(opc) header, case labels,
 * breaks and the function's closing lines (including tcg_temp_free(t0));
 * the arm attribution below is presumed from the memory-op shapes —
 * confirm against the upstream source.
 */
10095 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10096 int fd
, int fs
, int base
, int index
)
/* t0 holds the effective address for the whole function. */
10098 TCGv t0
= tcg_temp_new();
/* base == 0: address is just GPR[index]. */
10101 gen_load_gpr(t0
, index
);
/* index == 0: address is just GPR[base]. */
10102 } else if (index
== 0) {
10103 gen_load_gpr(t0
, base
);
/* General case: t0 = GPR[base] + GPR[index] (address-width add). */
10105 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10107 /* Don't do NOP if destination is zero: we must perform the actual
         memory access. */
/* LWXC1 arm: 32-bit load, sign-extended to target width then truncated
 * into the single-precision FPR fd. */
10113 TCGv_i32 fp0
= tcg_temp_new_i32();
10115 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10116 tcg_gen_trunc_tl_i32(fp0
, t0
);
10117 gen_store_fpr32(ctx
, fp0
, fd
);
10118 tcg_temp_free_i32(fp0
);
/* LDXC1 arm: 64-bit load into fd; needs an even/odd-capable FPU mode. */
10123 check_cp1_registers(ctx
, fd
);
10125 TCGv_i64 fp0
= tcg_temp_new_i64();
10126 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10127 gen_store_fpr64(ctx
, fp0
, fd
);
10128 tcg_temp_free_i64(fp0
);
/* LUXC1 arm: 64-bit load with the address forced 8-byte aligned
 * (low three bits cleared), per the unaligned-pair semantics. */
10132 check_cp1_64bitmode(ctx
);
10133 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10135 TCGv_i64 fp0
= tcg_temp_new_i64();
10137 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10138 gen_store_fpr64(ctx
, fp0
, fd
);
10139 tcg_temp_free_i64(fp0
);
/* SWXC1 arm: 32-bit store of single-precision FPR fs. */
10145 TCGv_i32 fp0
= tcg_temp_new_i32();
10146 gen_load_fpr32(ctx
, fp0
, fs
);
10147 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10148 tcg_temp_free_i32(fp0
);
/* SDXC1 arm: 64-bit store of double-precision FPR fs. */
10153 check_cp1_registers(ctx
, fs
);
10155 TCGv_i64 fp0
= tcg_temp_new_i64();
10156 gen_load_fpr64(ctx
, fp0
, fs
);
10157 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10158 tcg_temp_free_i64(fp0
);
/* SUXC1 arm: 64-bit store with the address forced 8-byte aligned. */
10162 check_cp1_64bitmode(ctx
);
10163 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10165 TCGv_i64 fp0
= tcg_temp_new_i64();
10166 gen_load_fpr64(ctx
, fp0
, fs
);
10167 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10168 tcg_temp_free_i64(fp0
);
/*
 * gen_flt3_arith: emit TCG for the COP1X 4-operand FP arithmetic group:
 * ALNV.PS plus the fused multiply-add family (MADD/MSUB/NMADD/NMSUB in
 * .S, .D and .PS formats), each dispatched to a gen_helper_float_* call
 * with operands fs * ft combined with fr, result stored to fd.
 * NOTE(review): fragmented extraction — the enclosing switch, case labels
 * and break statements are not visible; arms below are identified by the
 * helper they call.
 */
10175 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10176 int fd
, int fr
, int fs
, int ft
)
/* ALNV.PS: align variable paired-single, branching on fr's low bits,
   hence the local (branch-safe) temporary and the two labels. */
10182 TCGv t0
= tcg_temp_local_new();
10183 TCGv_i32 fp
= tcg_temp_new_i32();
10184 TCGv_i32 fph
= tcg_temp_new_i32();
10185 TCGLabel
*l1
= gen_new_label();
10186 TCGLabel
*l2
= gen_new_label();
/* t0 = GPR[fr] & 7: byte offset selecting the alignment case. */
10188 gen_load_gpr(t0
, fr
);
10189 tcg_gen_andi_tl(t0
, t0
, 0x7);
/* Offset 0: straight copy of fs (both halves) into fd. */
10191 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10192 gen_load_fpr32(ctx
, fp
, fs
);
10193 gen_load_fpr32h(ctx
, fph
, fs
);
10194 gen_store_fpr32(ctx
, fp
, fd
);
10195 gen_store_fpr32h(ctx
, fph
, fd
);
/* Offset 4: mix one half of fs with one half of ft; the half selection
   depends on target endianness. */
10198 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10200 #ifdef TARGET_WORDS_BIGENDIAN
10201 gen_load_fpr32(ctx
, fp
, fs
);
10202 gen_load_fpr32h(ctx
, fph
, ft
);
10203 gen_store_fpr32h(ctx
, fp
, fd
);
10204 gen_store_fpr32(ctx
, fph
, fd
);
10206 gen_load_fpr32h(ctx
, fph
, fs
);
10207 gen_load_fpr32(ctx
, fp
, ft
);
10208 gen_store_fpr32(ctx
, fph
, fd
);
10209 gen_store_fpr32h(ctx
, fp
, fd
);
10212 tcg_temp_free_i32(fp
);
10213 tcg_temp_free_i32(fph
);
/* MADD.S: fd = (fs * ft) + fr, via helper (fp2 is both fr input and
   result accumulator). */
10219 TCGv_i32 fp0
= tcg_temp_new_i32();
10220 TCGv_i32 fp1
= tcg_temp_new_i32();
10221 TCGv_i32 fp2
= tcg_temp_new_i32();
10223 gen_load_fpr32(ctx
, fp0
, fs
);
10224 gen_load_fpr32(ctx
, fp1
, ft
);
10225 gen_load_fpr32(ctx
, fp2
, fr
);
10226 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10227 tcg_temp_free_i32(fp0
);
10228 tcg_temp_free_i32(fp1
);
10229 gen_store_fpr32(ctx
, fp2
, fd
);
10230 tcg_temp_free_i32(fp2
);
/* MADD.D: double-precision variant; verify all four FPR numbers are
   legal in the current register mode. */
10235 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10237 TCGv_i64 fp0
= tcg_temp_new_i64();
10238 TCGv_i64 fp1
= tcg_temp_new_i64();
10239 TCGv_i64 fp2
= tcg_temp_new_i64();
10241 gen_load_fpr64(ctx
, fp0
, fs
);
10242 gen_load_fpr64(ctx
, fp1
, ft
);
10243 gen_load_fpr64(ctx
, fp2
, fr
);
10244 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10245 tcg_temp_free_i64(fp0
);
10246 tcg_temp_free_i64(fp1
);
10247 gen_store_fpr64(ctx
, fp2
, fd
);
10248 tcg_temp_free_i64(fp2
);
/* MADD.PS: paired-single variant. */
10254 TCGv_i64 fp0
= tcg_temp_new_i64();
10255 TCGv_i64 fp1
= tcg_temp_new_i64();
10256 TCGv_i64 fp2
= tcg_temp_new_i64();
10258 gen_load_fpr64(ctx
, fp0
, fs
);
10259 gen_load_fpr64(ctx
, fp1
, ft
);
10260 gen_load_fpr64(ctx
, fp2
, fr
);
10261 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10262 tcg_temp_free_i64(fp0
);
10263 tcg_temp_free_i64(fp1
);
10264 gen_store_fpr64(ctx
, fp2
, fd
);
10265 tcg_temp_free_i64(fp2
);
/* MSUB.S */
10271 TCGv_i32 fp0
= tcg_temp_new_i32();
10272 TCGv_i32 fp1
= tcg_temp_new_i32();
10273 TCGv_i32 fp2
= tcg_temp_new_i32();
10275 gen_load_fpr32(ctx
, fp0
, fs
);
10276 gen_load_fpr32(ctx
, fp1
, ft
);
10277 gen_load_fpr32(ctx
, fp2
, fr
);
10278 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10279 tcg_temp_free_i32(fp0
);
10280 tcg_temp_free_i32(fp1
);
10281 gen_store_fpr32(ctx
, fp2
, fd
);
10282 tcg_temp_free_i32(fp2
);
/* MSUB.D */
10287 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10289 TCGv_i64 fp0
= tcg_temp_new_i64();
10290 TCGv_i64 fp1
= tcg_temp_new_i64();
10291 TCGv_i64 fp2
= tcg_temp_new_i64();
10293 gen_load_fpr64(ctx
, fp0
, fs
);
10294 gen_load_fpr64(ctx
, fp1
, ft
);
10295 gen_load_fpr64(ctx
, fp2
, fr
);
10296 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10297 tcg_temp_free_i64(fp0
);
10298 tcg_temp_free_i64(fp1
);
10299 gen_store_fpr64(ctx
, fp2
, fd
);
10300 tcg_temp_free_i64(fp2
);
/* MSUB.PS */
10306 TCGv_i64 fp0
= tcg_temp_new_i64();
10307 TCGv_i64 fp1
= tcg_temp_new_i64();
10308 TCGv_i64 fp2
= tcg_temp_new_i64();
10310 gen_load_fpr64(ctx
, fp0
, fs
);
10311 gen_load_fpr64(ctx
, fp1
, ft
);
10312 gen_load_fpr64(ctx
, fp2
, fr
);
10313 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10314 tcg_temp_free_i64(fp0
);
10315 tcg_temp_free_i64(fp1
);
10316 gen_store_fpr64(ctx
, fp2
, fd
);
10317 tcg_temp_free_i64(fp2
);
/* NMADD.S */
10323 TCGv_i32 fp0
= tcg_temp_new_i32();
10324 TCGv_i32 fp1
= tcg_temp_new_i32();
10325 TCGv_i32 fp2
= tcg_temp_new_i32();
10327 gen_load_fpr32(ctx
, fp0
, fs
);
10328 gen_load_fpr32(ctx
, fp1
, ft
);
10329 gen_load_fpr32(ctx
, fp2
, fr
);
10330 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10331 tcg_temp_free_i32(fp0
);
10332 tcg_temp_free_i32(fp1
);
10333 gen_store_fpr32(ctx
, fp2
, fd
);
10334 tcg_temp_free_i32(fp2
);
/* NMADD.D */
10339 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10341 TCGv_i64 fp0
= tcg_temp_new_i64();
10342 TCGv_i64 fp1
= tcg_temp_new_i64();
10343 TCGv_i64 fp2
= tcg_temp_new_i64();
10345 gen_load_fpr64(ctx
, fp0
, fs
);
10346 gen_load_fpr64(ctx
, fp1
, ft
);
10347 gen_load_fpr64(ctx
, fp2
, fr
);
10348 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10349 tcg_temp_free_i64(fp0
);
10350 tcg_temp_free_i64(fp1
);
10351 gen_store_fpr64(ctx
, fp2
, fd
);
10352 tcg_temp_free_i64(fp2
);
/* NMADD.PS */
10358 TCGv_i64 fp0
= tcg_temp_new_i64();
10359 TCGv_i64 fp1
= tcg_temp_new_i64();
10360 TCGv_i64 fp2
= tcg_temp_new_i64();
10362 gen_load_fpr64(ctx
, fp0
, fs
);
10363 gen_load_fpr64(ctx
, fp1
, ft
);
10364 gen_load_fpr64(ctx
, fp2
, fr
);
10365 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10366 tcg_temp_free_i64(fp0
);
10367 tcg_temp_free_i64(fp1
);
10368 gen_store_fpr64(ctx
, fp2
, fd
);
10369 tcg_temp_free_i64(fp2
);
/* NMSUB.S */
10375 TCGv_i32 fp0
= tcg_temp_new_i32();
10376 TCGv_i32 fp1
= tcg_temp_new_i32();
10377 TCGv_i32 fp2
= tcg_temp_new_i32();
10379 gen_load_fpr32(ctx
, fp0
, fs
);
10380 gen_load_fpr32(ctx
, fp1
, ft
);
10381 gen_load_fpr32(ctx
, fp2
, fr
);
10382 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10383 tcg_temp_free_i32(fp0
);
10384 tcg_temp_free_i32(fp1
);
10385 gen_store_fpr32(ctx
, fp2
, fd
);
10386 tcg_temp_free_i32(fp2
);
/* NMSUB.D */
10391 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10393 TCGv_i64 fp0
= tcg_temp_new_i64();
10394 TCGv_i64 fp1
= tcg_temp_new_i64();
10395 TCGv_i64 fp2
= tcg_temp_new_i64();
10397 gen_load_fpr64(ctx
, fp0
, fs
);
10398 gen_load_fpr64(ctx
, fp1
, ft
);
10399 gen_load_fpr64(ctx
, fp2
, fr
);
10400 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10401 tcg_temp_free_i64(fp0
);
10402 tcg_temp_free_i64(fp1
);
10403 gen_store_fpr64(ctx
, fp2
, fd
);
10404 tcg_temp_free_i64(fp2
);
/* NMSUB.PS */
10410 TCGv_i64 fp0
= tcg_temp_new_i64();
10411 TCGv_i64 fp1
= tcg_temp_new_i64();
10412 TCGv_i64 fp2
= tcg_temp_new_i64();
10414 gen_load_fpr64(ctx
, fp0
, fs
);
10415 gen_load_fpr64(ctx
, fp1
, ft
);
10416 gen_load_fpr64(ctx
, fp2
, fr
);
10417 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10418 tcg_temp_free_i64(fp0
);
10419 tcg_temp_free_i64(fp1
);
10420 gen_store_fpr64(ctx
, fp2
, fd
);
10421 tcg_temp_free_i64(fp2
);
/* Unknown sub-opcode: reserved instruction exception. */
10425 MIPS_INVAL("flt3_arith");
10426 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_rdhwr: emit TCG for the RDHWR instruction — read a hardware
 * register (CPU number, SYNCI step, cycle counter, counter resolution,
 * performance counters, XNP, UserLocal) into GPR rt.
 * NOTE(review): fragmented extraction — the switch on rd, case labels,
 * breaks, #else/#endif lines and closing braces are missing from view.
 */
10431 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10435 #if !defined(CONFIG_USER_ONLY)
10436 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10437 Therefore only check the ISA in system mode. */
10438 check_insn(ctx
, ISA_MIPS32R2
);
10440 t0
= tcg_temp_new();
/* HWR 0: CPUNum. */
10444 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10445 gen_store_gpr(t0
, rt
);
/* HWR 1: SYNCI cache-line step. */
10448 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10449 gen_store_gpr(t0
, rt
);
/* HWR 2: high-resolution cycle counter (CC). */
10452 gen_helper_rdhwr_cc(t0
, cpu_env
);
10453 gen_store_gpr(t0
, rt
);
/* HWR 3: CC resolution (CCRes). */
10456 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10457 gen_store_gpr(t0
, rt
);
/* R6-only performance-counter HWR. */
10460 check_insn(ctx
, ISA_MIPS32R6
);
10462 /* Performance counter registers are not implemented other than
10463 * control register 0.
10465 generate_exception(ctx
, EXCP_RI
);
10467 gen_helper_rdhwr_performance(t0
, cpu_env
);
10468 gen_store_gpr(t0
, rt
);
/* R6-only XNP HWR. */
10471 check_insn(ctx
, ISA_MIPS32R6
);
10472 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10473 gen_store_gpr(t0
, rt
);
/* UserLocal HWR: user mode always reads it; system mode requires CP0
   access or the HWREna.ULR bit, otherwise RI. */
10476 #if defined(CONFIG_USER_ONLY)
10477 tcg_gen_ld_tl(t0
, cpu_env
,
10478 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10479 gen_store_gpr(t0
, rt
);
10482 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10483 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10484 tcg_gen_ld_tl(t0
, cpu_env
,
10485 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10486 gen_store_gpr(t0
, rt
);
10488 generate_exception_end(ctx
, EXCP_RI
);
10492 default: /* Invalid */
10493 MIPS_INVAL("rdhwr");
10494 generate_exception_end(ctx
, EXCP_RI
);
/*
 * clear_branch_hflags: drop the branch-state bits (MIPS_HFLAG_BMASK)
 * from the translation-time hflags, and mirror that into the runtime
 * hflags TCG global — either by re-saving the CPU state (when no branch
 * is pending, bstate == BS_NONE) or by masking the global directly.
 */
10500 static inline void clear_branch_hflags(DisasContext
*ctx
)
10502 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10503 if (ctx
->bstate
== BS_NONE
) {
10504 save_cpu_state(ctx
, 0);
10506 /* it is not safe to save ctx->hflags as hflags may be changed
10507 in execution time by the instruction in delay / forbidden slot. */
10508 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
/*
 * gen_branch: complete a pending branch after its delay/forbidden slot.
 * Dispatches on the branch-type bits saved in hflags: forbidden-slot
 * fall-through, unconditional branch, branch-likely, conditional branch
 * (tests the global bcond), and branch-to-register (cpu_PC = btarget,
 * with MIPS16/microMIPS ISA-mode bit handling).
 * NOTE(review): fragmented extraction — several case labels, breaks and
 * closing braces are missing from view.
 */
10512 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10514 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10515 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10516 /* Branches completion */
10517 clear_branch_hflags(ctx
);
10518 ctx
->bstate
= BS_BRANCH
;
10519 /* FIXME: Need to clear can_do_io. */
10520 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10521 case MIPS_HFLAG_FBNSLOT
:
/* Forbidden slot of a not-taken compact branch: fall through. */
10522 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10525 /* unconditional branch */
10526 if (proc_hflags
& MIPS_HFLAG_BX
) {
/* JALX-style ISA-mode toggle before jumping. */
10527 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10529 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10531 case MIPS_HFLAG_BL
:
10532 /* blikely taken case */
10533 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10535 case MIPS_HFLAG_BC
:
10536 /* Conditional branch */
10538 TCGLabel
*l1
= gen_new_label();
/* bcond != 0 -> taken path; else fall through to next insn. */
10540 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10541 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10543 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10546 case MIPS_HFLAG_BR
:
10547 /* unconditional branch to register */
10548 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10549 TCGv t0
= tcg_temp_new();
10550 TCGv_i32 t1
= tcg_temp_new_i32();
/* Bit 0 of the target selects the ISA mode (MIPS16/microMIPS);
   move it into hflags and clear it from the jump address. */
10552 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10553 tcg_gen_trunc_tl_i32(t1
, t0
);
10555 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10556 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10557 tcg_gen_or_i32(hflags
, hflags
, t1
);
10558 tcg_temp_free_i32(t1
);
10560 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10562 tcg_gen_mov_tl(cpu_PC
, btarget
);
10564 if (ctx
->singlestep_enabled
) {
10565 save_cpu_state(ctx
, 0);
10566 gen_helper_raise_exception_debug(cpu_env
);
10568 tcg_gen_exit_tb(0);
/* Should not happen: unrecognized branch-type flag. */
10571 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10577 /* Compact Branches */
/*
 * gen_compute_compact_branch: decode and emit a MIPS R6 compact branch
 * or jump (no delay slot). First phase loads operands into t0/t1 and
 * computes ctx->btarget; second phase either finalizes an unconditional
 * branch immediately (via gen_branch) or emits the inverted-condition
 * brcond for a conditional branch and sets the forbidden-slot flag.
 * NOTE(review): fragmented extraction — the two switch(opc) headers,
 * many case labels, breaks and closing braces are missing from view.
 */
10578 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10579 int rs
, int rt
, int32_t offset
)
10581 int bcond_compute
= 0;
10582 TCGv t0
= tcg_temp_new();
10583 TCGv t1
= tcg_temp_new();
/* When executing as MIPS16/microMIPS, the saved return address must
   keep its ISA-mode low bit set. */
10584 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
/* A compact branch in a delay/forbidden slot is a reserved instruction. */
10586 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10587 #ifdef MIPS_DEBUG_DISAS
10588 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10591 generate_exception_end(ctx
, EXCP_RI
);
10595 /* Load needed operands and calculate btarget */
10597 /* compact branch */
10598 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10599 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10600 gen_load_gpr(t0
, rs
);
10601 gen_load_gpr(t1
, rt
);
10603 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
/* rs/rt encoding distinguishes the -ALC (link) forms. */
10604 if (rs
<= rt
&& rs
== 0) {
10605 /* OPC_BEQZALC, OPC_BNEZALC */
10606 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10609 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10610 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10611 gen_load_gpr(t0
, rs
);
10612 gen_load_gpr(t1
, rt
);
10614 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10616 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10617 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10618 if (rs
== 0 || rs
== rt
) {
10619 /* OPC_BLEZALC, OPC_BGEZALC */
10620 /* OPC_BGTZALC, OPC_BLTZALC */
10621 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10623 gen_load_gpr(t0
, rs
);
10624 gen_load_gpr(t1
, rt
);
10626 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10630 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10635 /* OPC_BEQZC, OPC_BNEZC */
10636 gen_load_gpr(t0
, rs
);
10638 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10640 /* OPC_JIC, OPC_JIALC */
/* Register-indirect: btarget = GPR[rt] + offset. */
10641 TCGv tbase
= tcg_temp_new();
10642 TCGv toffset
= tcg_temp_new();
10644 gen_load_gpr(tbase
, rt
);
10645 tcg_gen_movi_tl(toffset
, offset
);
10646 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10647 tcg_temp_free(tbase
);
10648 tcg_temp_free(toffset
);
10652 MIPS_INVAL("Compact branch/jump");
10653 generate_exception_end(ctx
, EXCP_RI
);
10657 if (bcond_compute
== 0) {
10658 /* Uncoditional compact branch */
10661 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10664 ctx
->hflags
|= MIPS_HFLAG_BR
;
10667 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10670 ctx
->hflags
|= MIPS_HFLAG_B
;
10673 MIPS_INVAL("Compact branch/jump");
10674 generate_exception_end(ctx
, EXCP_RI
);
10678 /* Generating branch here as compact branches don't have delay slot */
10679 gen_branch(ctx
, 4);
10681 /* Conditional compact branch */
/* Label "fs" = fall-through (branch-not-taken) path; every arm below
   branches to it on the INVERTED condition. */
10682 TCGLabel
*fs
= gen_new_label();
10683 save_cpu_state(ctx
, 0);
10686 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10687 if (rs
== 0 && rt
!= 0) {
10689 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10690 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10692 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10695 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10698 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10699 if (rs
== 0 && rt
!= 0) {
10701 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10702 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10704 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10707 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10710 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10711 if (rs
== 0 && rt
!= 0) {
10713 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10714 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10716 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10719 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10722 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10723 if (rs
== 0 && rt
!= 0) {
10725 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10726 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10728 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10731 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10734 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10735 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10737 /* OPC_BOVC, OPC_BNVC */
/* Compute 32-bit signed-add overflow of rs+rt, also flagging inputs
   that are not valid sign-extended 32-bit values (input_overflow). */
10738 TCGv t2
= tcg_temp_new();
10739 TCGv t3
= tcg_temp_new();
10740 TCGv t4
= tcg_temp_new();
10741 TCGv input_overflow
= tcg_temp_new();
10743 gen_load_gpr(t0
, rs
);
10744 gen_load_gpr(t1
, rt
);
10745 tcg_gen_ext32s_tl(t2
, t0
);
10746 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10747 tcg_gen_ext32s_tl(t3
, t1
);
10748 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10749 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10751 tcg_gen_add_tl(t4
, t2
, t3
);
10752 tcg_gen_ext32s_tl(t4
, t4
);
/* Classic overflow test: (~(a^b)) & (sum^b) < 0. */
10753 tcg_gen_xor_tl(t2
, t2
, t3
);
10754 tcg_gen_xor_tl(t3
, t4
, t3
);
10755 tcg_gen_andc_tl(t2
, t3
, t2
);
10756 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10757 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10758 if (opc
== OPC_BOVC
) {
10760 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10763 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10765 tcg_temp_free(input_overflow
);
10769 } else if (rs
< rt
&& rs
== 0) {
10770 /* OPC_BEQZALC, OPC_BNEZALC */
10771 if (opc
== OPC_BEQZALC
) {
10773 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10776 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10779 /* OPC_BEQC, OPC_BNEC */
10780 if (opc
== OPC_BEQC
) {
10782 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10785 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10790 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10793 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10796 MIPS_INVAL("Compact conditional branch/jump");
10797 generate_exception_end(ctx
, EXCP_RI
);
10801 /* Generating branch here as compact branches don't have delay slot */
10802 gen_goto_tb(ctx
, 1, ctx
->btarget
);
/* Not taken: mark the following instruction as a forbidden slot. */
10805 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10813 /* ISA extensions (ASEs) */
10814 /* MIPS16 extension to MIPS32 */
10816 /* MIPS16 major opcodes */
/* NOTE(review): fragmented extraction — the surrounding `enum {` / `};`
 * lines and several constants are missing from view; values below are the
 * 5-bit MIPS16e major opcode field. */
10818 M16_OPC_ADDIUSP
= 0x00,
10819 M16_OPC_ADDIUPC
= 0x01,
10821 M16_OPC_JAL
= 0x03,
10822 M16_OPC_BEQZ
= 0x04,
10823 M16_OPC_BNEQZ
= 0x05,
10824 M16_OPC_SHIFT
= 0x06,
10826 M16_OPC_RRIA
= 0x08,
10827 M16_OPC_ADDIU8
= 0x09,
10828 M16_OPC_SLTI
= 0x0a,
10829 M16_OPC_SLTIU
= 0x0b,
10832 M16_OPC_CMPI
= 0x0e,
10836 M16_OPC_LWSP
= 0x12,
10838 M16_OPC_LBU
= 0x14,
10839 M16_OPC_LHU
= 0x15,
10840 M16_OPC_LWPC
= 0x16,
10841 M16_OPC_LWU
= 0x17,
10844 M16_OPC_SWSP
= 0x1a,
10846 M16_OPC_RRR
= 0x1c,
10848 M16_OPC_EXTEND
= 0x1e,
10852 /* I8 funct field */
10871 /* RR funct field */
10905 /* I64 funct field */
10913 I64_DADDIUPC
= 0x6,
10917 /* RR ry field for CNVT */
/* ZE* = zero-extend, SE* = sign-extend; B/H/W = byte/halfword/word. */
10919 RR_RY_CNVT_ZEB
= 0x0,
10920 RR_RY_CNVT_ZEH
= 0x1,
10921 RR_RY_CNVT_ZEW
= 0x2,
10922 RR_RY_CNVT_SEB
= 0x4,
10923 RR_RY_CNVT_SEH
= 0x5,
10924 RR_RY_CNVT_SEW
= 0x6,
/*
 * xlat: translate a 3-bit MIPS16 register field (0..7) to the MIPS32
 * GPR number it denotes ($16, $17, then $2..$7).
 * NOTE(review): the function's return statement is missing from this
 * fragmented view — presumably `return map[r];`; confirm in full source.
 */
10927 static int xlat (int r
)
10929 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
/*
 * gen_mips16_save: emit TCG for the MIPS16e SAVE instruction — store
 * argument registers ($4..$7) above the frame, push the selected static
 * registers ($16..$23, $30, $31) below $29, then decrement $29 by the
 * frame size.
 * NOTE(review): fragmented extraction — the switch statements on
 * aregs/xsregs, braces and the framesize parameter declaration are
 * missing from view.
 */
10934 static void gen_mips16_save (DisasContext
*ctx
,
10935 int xsregs
, int aregs
,
10936 int do_ra
, int do_s0
, int do_s1
,
10939 TCGv t0
= tcg_temp_new();
10940 TCGv t1
= tcg_temp_new();
10941 TCGv t2
= tcg_temp_new();
/* Invalid aregs encoding. */
10971 generate_exception_end(ctx
, EXCP_RI
);
/* Spill incoming arguments $7..$4 to 12($29)..0($29) as selected. */
10977 gen_base_offset_addr(ctx
, t0
, 29, 12);
10978 gen_load_gpr(t1
, 7);
10979 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10982 gen_base_offset_addr(ctx
, t0
, 29, 8);
10983 gen_load_gpr(t1
, 6);
10984 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10987 gen_base_offset_addr(ctx
, t0
, 29, 4);
10988 gen_load_gpr(t1
, 5);
10989 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10992 gen_base_offset_addr(ctx
, t0
, 29, 0);
10993 gen_load_gpr(t1
, 4);
10994 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
/* t0 = current stack pointer; pushes proceed downward from here. */
10997 gen_load_gpr(t0
, 29);
/* Helper: pre-decrement t0 by 4 and store GPR `reg` there. */
10999 #define DECR_AND_STORE(reg) do { \
11000 tcg_gen_movi_tl(t2, -4); \
11001 gen_op_addr_add(ctx, t0, t0, t2); \
11002 gen_load_gpr(t1, reg); \
11003 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
/* Push ra, fp and the extended static registers as selected. */
11007 DECR_AND_STORE(31);
11012 DECR_AND_STORE(30);
11015 DECR_AND_STORE(23);
11018 DECR_AND_STORE(22);
11021 DECR_AND_STORE(21);
11024 DECR_AND_STORE(20);
11027 DECR_AND_STORE(19);
11030 DECR_AND_STORE(18);
11034 DECR_AND_STORE(17);
11037 DECR_AND_STORE(16);
/* Invalid aregs encoding in the argument-push switch. */
11067 generate_exception_end(ctx
, EXCP_RI
);
11083 #undef DECR_AND_STORE
/* Finally allocate the frame: $29 -= framesize. */
11085 tcg_gen_movi_tl(t2
, -framesize
);
11086 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
/*
 * gen_mips16_restore: emit TCG for the MIPS16e RESTORE instruction —
 * reload the selected saved registers from the top of the frame
 * ($29 + framesize, walking downward) and then pop the frame
 * ($29 += framesize). Mirror image of gen_mips16_save.
 * NOTE(review): fragmented extraction — the DECR_AND_LOAD call sites,
 * switches and the framesize parameter declaration are missing from view.
 */
11092 static void gen_mips16_restore (DisasContext
*ctx
,
11093 int xsregs
, int aregs
,
11094 int do_ra
, int do_s0
, int do_s1
,
11098 TCGv t0
= tcg_temp_new();
11099 TCGv t1
= tcg_temp_new();
11100 TCGv t2
= tcg_temp_new();
/* t0 = $29 + framesize: address of the top of the saved area. */
11102 tcg_gen_movi_tl(t2
, framesize
);
11103 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
/* Helper: pre-decrement t0 by 4, load from there into GPR `reg`. */
11105 #define DECR_AND_LOAD(reg) do { \
11106 tcg_gen_movi_tl(t2, -4); \
11107 gen_op_addr_add(ctx, t0, t0, t2); \
11108 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11109 gen_store_gpr(t1, reg); \
/* Invalid encoding. */
11173 generate_exception_end(ctx
, EXCP_RI
);
11189 #undef DECR_AND_LOAD
/* Deallocate the frame: $29 += framesize. */
11191 tcg_gen_movi_tl(t2
, framesize
);
11192 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
/*
 * gen_addiupc: emit TCG for the MIPS16 ADDIUPC instruction:
 * GPR[rx] = pc_relative_pc(ctx) + imm, sign-extended to 32 bits when
 * is_64_bit is false. An EXTENDed ADDIUPC in a branch delay slot is a
 * reserved instruction.
 */
11198 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11199 int is_64_bit
, int extended
)
11203 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11204 generate_exception_end(ctx
, EXCP_RI
);
11208 t0
= tcg_temp_new();
/* Base is the masked PC-relative anchor, not the raw PC. */
11210 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11211 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
/* 32-bit mode: keep the result properly sign-extended. */
11213 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
/*
 * gen_cache_operation: emit TCG for the CACHE instruction — compute the
 * effective address base+offset and invoke the cache helper with the
 * 5-bit operation code. The helper performs the (mostly no-op) cache
 * maintenance action.
 */
11219 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11222 TCGv_i32 t0
= tcg_const_i32(op
);
11223 TCGv t1
= tcg_temp_new();
11224 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11225 gen_helper_cache(cpu_env
, t1
, t0
);
11228 #if defined(TARGET_MIPS64)
/*
 * decode_i64_mips16: decode the MIPS16 I64-format (64-bit only)
 * instructions: LD/SD relative to $29, SD of $31, DADDIU to $29/ry,
 * PC-relative LD and DADDIUPC. Every arm checks ISA_MIPS3 + 64-bit
 * mode; `extended` selects whether the offset is taken as-is or scaled
 * from the 16-bit encoding.
 * NOTE(review): fragmented extraction — the switch(funct) header, case
 * labels, breaks and the `extended` parameter declaration are missing
 * from view.
 */
11229 static void decode_i64_mips16 (DisasContext
*ctx
,
11230 int ry
, int funct
, int16_t offset
,
/* LD ry, offset($29) — non-extended offset is scaled by 8. */
11235 check_insn(ctx
, ISA_MIPS3
);
11236 check_mips_64(ctx
);
11237 offset
= extended
? offset
: offset
<< 3;
11238 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
/* SD ry, offset($29). */
11241 check_insn(ctx
, ISA_MIPS3
);
11242 check_mips_64(ctx
);
11243 offset
= extended
? offset
: offset
<< 3;
11244 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
/* SD $31, offset($29) — 8-bit immediate field scaled by 8. */
11247 check_insn(ctx
, ISA_MIPS3
);
11248 check_mips_64(ctx
);
11249 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11250 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
/* DADDIU $29, signed 8-bit immediate scaled by 8 (stack adjust). */
11253 check_insn(ctx
, ISA_MIPS3
);
11254 check_mips_64(ctx
);
11255 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11256 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
/* LDPC: PC-relative LD; extended form illegal in a delay slot. */
11259 check_insn(ctx
, ISA_MIPS3
);
11260 check_mips_64(ctx
);
11261 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11262 generate_exception_end(ctx
, EXCP_RI
);
11264 offset
= extended
? offset
: offset
<< 3;
11265 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
/* DADDIU ry, ry, imm — 5-bit immediate sign-extended via the
   shift-left/shift-right pair. */
11269 check_insn(ctx
, ISA_MIPS3
);
11270 check_mips_64(ctx
);
11271 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11272 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
/* DADDIUPC: PC-relative address generation (is_64_bit = 1). */
11275 check_insn(ctx
, ISA_MIPS3
);
11276 check_mips_64(ctx
);
11277 offset
= extended
? offset
: offset
<< 2;
11278 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
/* DADDIU ry, $29, offset — frame-pointer style address. */
11281 check_insn(ctx
, ISA_MIPS3
);
11282 check_mips_64(ctx
);
11283 offset
= extended
? offset
: offset
<< 2;
11284 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
/*
 * decode_extended_mips16_opc: decode a MIPS16 EXTEND-prefixed (32-bit)
 * instruction. Reads the second halfword, merges it into ctx->opcode,
 * extracts the field layout of the extended format (op, sa, funct,
 * rx/ry via xlat, and a reassembled 16-bit immediate), then dispatches
 * on the major opcode. Returns the instruction length consumed
 * (return statement not visible in this fragment — confirm in full
 * source).
 * NOTE(review): fragmented extraction — the main switch header, many
 * case labels, breaks, #else/#endif pairs and braces are missing.
 */
11290 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11292 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11293 int op
, rx
, ry
, funct
, sa
;
11294 int16_t imm
, offset
;
/* Merge prefix and instruction halfwords into one 32-bit opcode. */
11296 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11297 op
= (ctx
->opcode
>> 11) & 0x1f;
11298 sa
= (ctx
->opcode
>> 22) & 0x1f;
11299 funct
= (ctx
->opcode
>> 8) & 0x7;
11300 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11301 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
/* Reassemble the split 16-bit immediate: [20:16]<<11 | [26:21]<<5 |
   [4:0]. */
11302 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11303 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11304 | (ctx
->opcode
& 0x1f));
11306 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11309 case M16_OPC_ADDIUSP
:
11310 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11312 case M16_OPC_ADDIUPC
:
11313 gen_addiupc(ctx
, rx
, imm
, 0, 1);
/* Unconditional B (BEQ $0,$0). */
11316 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11317 /* No delay slot, so just process as a normal instruction */
/* BEQZ rx. */
11320 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11321 /* No delay slot, so just process as a normal instruction */
11323 case M16_OPC_BNEQZ
:
11324 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11325 /* No delay slot, so just process as a normal instruction */
11327 case M16_OPC_SHIFT
:
/* Low 2 bits select SLL/DSLL/SRL/SRA. */
11328 switch (ctx
->opcode
& 0x3) {
11330 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11333 #if defined(TARGET_MIPS64)
11334 check_mips_64(ctx
);
11335 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11337 generate_exception_end(ctx
, EXCP_RI
);
11341 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11344 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
/* LD (64-bit only). */
11348 #if defined(TARGET_MIPS64)
11350 check_insn(ctx
, ISA_MIPS3
);
11351 check_mips_64(ctx
);
11352 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
/* RRIA: reassemble the 15-bit immediate and sign-extend, then
   ADDIU/DADDIU depending on bit 4. */
11356 imm
= ctx
->opcode
& 0xf;
11357 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11358 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11359 imm
= (int16_t) (imm
<< 1) >> 1;
11360 if ((ctx
->opcode
>> 4) & 0x1) {
11361 #if defined(TARGET_MIPS64)
11362 check_mips_64(ctx
);
11363 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11365 generate_exception_end(ctx
, EXCP_RI
);
11368 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11371 case M16_OPC_ADDIU8
:
11372 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
/* SLTI/SLTIU write the condition register $24. */
11375 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11377 case M16_OPC_SLTIU
:
11378 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
/* I8 group: BTEQZ/BTNEZ test $24; SWRASP/ADJSP use fixed regs. */
11383 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11386 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11389 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11392 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
/* SVRS (extended SAVE/RESTORE): decode register masks and frame size. */
11395 check_insn(ctx
, ISA_MIPS32
);
11397 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11398 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11399 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11400 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11401 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11402 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11403 | (ctx
->opcode
& 0xf)) << 3;
/* Bit 7 distinguishes SAVE from RESTORE. */
11405 if (ctx
->opcode
& (1 << 7)) {
11406 gen_mips16_save(ctx
, xsregs
, aregs
,
11407 do_ra
, do_s0
, do_s1
,
11410 gen_mips16_restore(ctx
, xsregs
, aregs
,
11411 do_ra
, do_s0
, do_s1
,
11417 generate_exception_end(ctx
, EXCP_RI
);
/* LI: load zero-extended immediate; CMPI: XOR into $24. */
11422 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11425 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
/* SD (64-bit only). */
11427 #if defined(TARGET_MIPS64)
11429 check_insn(ctx
, ISA_MIPS3
);
11430 check_mips_64(ctx
);
11431 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
/* Load group. */
11435 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11438 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11441 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11444 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11447 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11450 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11453 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11455 #if defined(TARGET_MIPS64)
11457 check_insn(ctx
, ISA_MIPS3
);
11458 check_mips_64(ctx
);
11459 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
/* Store group. */
11463 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11466 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11469 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11472 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
/* I64 format handed off to the 64-bit decoder (extended = 1). */
11474 #if defined(TARGET_MIPS64)
11476 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11480 generate_exception_end(ctx
, EXCP_RI
);
/*
 * is_uhi: return whether an SDBBP with the given code field should be
 * treated as a UHI (Unified Hosting Interface) semihosting call —
 * i.e. semihosting is enabled and the code is 1. Never in user mode.
 * NOTE(review): the CONFIG_USER_ONLY arm's `return false;`, the #else
 * and #endif are missing from this fragmented view — confirm in full
 * source.
 */
11487 static inline bool is_uhi(int sdbbp_code
)
11489 #ifdef CONFIG_USER_ONLY
11492 return semihosting_enabled() && sdbbp_code
== 1;
11496 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11500 int op
, cnvt_op
, op1
, offset
;
11504 op
= (ctx
->opcode
>> 11) & 0x1f;
11505 sa
= (ctx
->opcode
>> 2) & 0x7;
11506 sa
= sa
== 0 ? 8 : sa
;
11507 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11508 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11509 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11510 op1
= offset
= ctx
->opcode
& 0x1f;
11515 case M16_OPC_ADDIUSP
:
11517 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11519 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11522 case M16_OPC_ADDIUPC
:
11523 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11526 offset
= (ctx
->opcode
& 0x7ff) << 1;
11527 offset
= (int16_t)(offset
<< 4) >> 4;
11528 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11529 /* No delay slot, so just process as a normal instruction */
11532 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11533 offset
= (((ctx
->opcode
& 0x1f) << 21)
11534 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11536 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11537 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11541 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11542 ((int8_t)ctx
->opcode
) << 1, 0);
11543 /* No delay slot, so just process as a normal instruction */
11545 case M16_OPC_BNEQZ
:
11546 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11547 ((int8_t)ctx
->opcode
) << 1, 0);
11548 /* No delay slot, so just process as a normal instruction */
11550 case M16_OPC_SHIFT
:
11551 switch (ctx
->opcode
& 0x3) {
11553 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11556 #if defined(TARGET_MIPS64)
11557 check_insn(ctx
, ISA_MIPS3
);
11558 check_mips_64(ctx
);
11559 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11561 generate_exception_end(ctx
, EXCP_RI
);
11565 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11568 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11572 #if defined(TARGET_MIPS64)
11574 check_insn(ctx
, ISA_MIPS3
);
11575 check_mips_64(ctx
);
11576 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11581 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11583 if ((ctx
->opcode
>> 4) & 1) {
11584 #if defined(TARGET_MIPS64)
11585 check_insn(ctx
, ISA_MIPS3
);
11586 check_mips_64(ctx
);
11587 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11589 generate_exception_end(ctx
, EXCP_RI
);
11592 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11596 case M16_OPC_ADDIU8
:
11598 int16_t imm
= (int8_t) ctx
->opcode
;
11600 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11605 int16_t imm
= (uint8_t) ctx
->opcode
;
11606 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11609 case M16_OPC_SLTIU
:
11611 int16_t imm
= (uint8_t) ctx
->opcode
;
11612 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11619 funct
= (ctx
->opcode
>> 8) & 0x7;
11622 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11623 ((int8_t)ctx
->opcode
) << 1, 0);
11626 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11627 ((int8_t)ctx
->opcode
) << 1, 0);
11630 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11633 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11634 ((int8_t)ctx
->opcode
) << 3);
11637 check_insn(ctx
, ISA_MIPS32
);
11639 int do_ra
= ctx
->opcode
& (1 << 6);
11640 int do_s0
= ctx
->opcode
& (1 << 5);
11641 int do_s1
= ctx
->opcode
& (1 << 4);
11642 int framesize
= ctx
->opcode
& 0xf;
11644 if (framesize
== 0) {
11647 framesize
= framesize
<< 3;
11650 if (ctx
->opcode
& (1 << 7)) {
11651 gen_mips16_save(ctx
, 0, 0,
11652 do_ra
, do_s0
, do_s1
, framesize
);
11654 gen_mips16_restore(ctx
, 0, 0,
11655 do_ra
, do_s0
, do_s1
, framesize
);
11661 int rz
= xlat(ctx
->opcode
& 0x7);
11663 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11664 ((ctx
->opcode
>> 5) & 0x7);
11665 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11669 reg32
= ctx
->opcode
& 0x1f;
11670 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11673 generate_exception_end(ctx
, EXCP_RI
);
11680 int16_t imm
= (uint8_t) ctx
->opcode
;
11682 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11687 int16_t imm
= (uint8_t) ctx
->opcode
;
11688 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11691 #if defined(TARGET_MIPS64)
11693 check_insn(ctx
, ISA_MIPS3
);
11694 check_mips_64(ctx
);
11695 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11699 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11702 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11705 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11708 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11711 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11714 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11717 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11719 #if defined (TARGET_MIPS64)
11721 check_insn(ctx
, ISA_MIPS3
);
11722 check_mips_64(ctx
);
11723 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11727 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11730 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11733 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11736 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11740 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11743 switch (ctx
->opcode
& 0x3) {
11745 mips32_op
= OPC_ADDU
;
11748 mips32_op
= OPC_SUBU
;
11750 #if defined(TARGET_MIPS64)
11752 mips32_op
= OPC_DADDU
;
11753 check_insn(ctx
, ISA_MIPS3
);
11754 check_mips_64(ctx
);
11757 mips32_op
= OPC_DSUBU
;
11758 check_insn(ctx
, ISA_MIPS3
);
11759 check_mips_64(ctx
);
11763 generate_exception_end(ctx
, EXCP_RI
);
11767 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11776 int nd
= (ctx
->opcode
>> 7) & 0x1;
11777 int link
= (ctx
->opcode
>> 6) & 0x1;
11778 int ra
= (ctx
->opcode
>> 5) & 0x1;
11781 check_insn(ctx
, ISA_MIPS32
);
11790 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11795 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11796 gen_helper_do_semihosting(cpu_env
);
11798 /* XXX: not clear which exception should be raised
11799 * when in debug mode...
11801 check_insn(ctx
, ISA_MIPS32
);
11802 generate_exception_end(ctx
, EXCP_DBp
);
11806 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11809 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11812 generate_exception_end(ctx
, EXCP_BREAK
);
11815 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11818 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11821 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11823 #if defined (TARGET_MIPS64)
11825 check_insn(ctx
, ISA_MIPS3
);
11826 check_mips_64(ctx
);
11827 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11831 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11834 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11837 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11840 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11843 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11846 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11849 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11852 check_insn(ctx
, ISA_MIPS32
);
11854 case RR_RY_CNVT_ZEB
:
11855 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11857 case RR_RY_CNVT_ZEH
:
11858 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11860 case RR_RY_CNVT_SEB
:
11861 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11863 case RR_RY_CNVT_SEH
:
11864 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11866 #if defined (TARGET_MIPS64)
11867 case RR_RY_CNVT_ZEW
:
11868 check_insn(ctx
, ISA_MIPS64
);
11869 check_mips_64(ctx
);
11870 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11872 case RR_RY_CNVT_SEW
:
11873 check_insn(ctx
, ISA_MIPS64
);
11874 check_mips_64(ctx
);
11875 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11879 generate_exception_end(ctx
, EXCP_RI
);
11884 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11886 #if defined (TARGET_MIPS64)
11888 check_insn(ctx
, ISA_MIPS3
);
11889 check_mips_64(ctx
);
11890 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11893 check_insn(ctx
, ISA_MIPS3
);
11894 check_mips_64(ctx
);
11895 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11898 check_insn(ctx
, ISA_MIPS3
);
11899 check_mips_64(ctx
);
11900 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11903 check_insn(ctx
, ISA_MIPS3
);
11904 check_mips_64(ctx
);
11905 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11909 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11912 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11915 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11918 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11920 #if defined (TARGET_MIPS64)
11922 check_insn(ctx
, ISA_MIPS3
);
11923 check_mips_64(ctx
);
11924 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11927 check_insn(ctx
, ISA_MIPS3
);
11928 check_mips_64(ctx
);
11929 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11932 check_insn(ctx
, ISA_MIPS3
);
11933 check_mips_64(ctx
);
11934 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11937 check_insn(ctx
, ISA_MIPS3
);
11938 check_mips_64(ctx
);
11939 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11943 generate_exception_end(ctx
, EXCP_RI
);
11947 case M16_OPC_EXTEND
:
11948 decode_extended_mips16_opc(env
, ctx
);
11951 #if defined(TARGET_MIPS64)
11953 funct
= (ctx
->opcode
>> 8) & 0x7;
11954 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11958 generate_exception_end(ctx
, EXCP_RI
);
11965 /* microMIPS extension to MIPS32/MIPS64 */
11968 * microMIPS32/microMIPS64 major opcodes
11970 * 1. MIPS Architecture for Programmers Volume II-B:
11971 * The microMIPS32 Instruction Set (Revision 3.05)
11973 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11975 * 2. MIPS Architecture For Programmers Volume II-A:
11976 * The MIPS64 Instruction Set (Revision 3.51)
12006 POOL32S
= 0x16, /* MIPS64 */
12007 DADDIU32
= 0x17, /* MIPS64 */
12036 /* 0x29 is reserved */
12049 /* 0x31 is reserved */
12062 SD32
= 0x36, /* MIPS64 */
12063 LD32
= 0x37, /* MIPS64 */
12065 /* 0x39 is reserved */
12081 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12091 /* POOL32A encoding of minor opcode field */
12094 /* These opcodes are distinguished only by bits 9..6; those bits are
12095 * what are recorded below. */
12132 /* The following can be distinguished by their lower 6 bits. */
12142 /* POOL32AXF encoding of minor opcode field extension */
12145 * 1. MIPS Architecture for Programmers Volume II-B:
12146 * The microMIPS32 Instruction Set (Revision 3.05)
12148 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12150 * 2. MIPS Architecture for Programmers VolumeIV-e:
12151 * The MIPS DSP Application-Specific Extension
12152 * to the microMIPS32 Architecture (Revision 2.34)
12154 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12169 /* begin of microMIPS32 DSP */
12171 /* bits 13..12 for 0x01 */
12177 /* bits 13..12 for 0x2a */
12183 /* bits 13..12 for 0x32 */
12187 /* end of microMIPS32 DSP */
12189 /* bits 15..12 for 0x2c */
12206 /* bits 15..12 for 0x34 */
12214 /* bits 15..12 for 0x3c */
12216 JR
= 0x0, /* alias */
12224 /* bits 15..12 for 0x05 */
12228 /* bits 15..12 for 0x0d */
12240 /* bits 15..12 for 0x15 */
12246 /* bits 15..12 for 0x1d */
12250 /* bits 15..12 for 0x2d */
12255 /* bits 15..12 for 0x35 */
12262 /* POOL32B encoding of minor opcode field (bits 15..12) */
12278 /* POOL32C encoding of minor opcode field (bits 15..12) */
12286 /* 0xa is reserved */
12293 /* 0x6 is reserved */
12299 /* POOL32F encoding of minor opcode field (bits 5..0) */
12302 /* These are the bit 7..6 values */
12311 /* These are the bit 8..6 values */
12336 MOVZ_FMT_05
= 0x05,
12370 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12377 /* POOL32Fxf encoding of minor opcode extension field */
12415 /* POOL32I encoding of minor opcode field (bits 25..21) */
12445 /* These overlap and are distinguished by bit16 of the instruction */
12454 /* POOL16A encoding of minor opcode field */
12461 /* POOL16B encoding of minor opcode field */
12468 /* POOL16C encoding of minor opcode field */
12488 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12508 /* POOL16D encoding of minor opcode field */
12515 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit MIPS16/microMIPS register encoding onto the full GPR
 * number: encodings 0 and 1 select $16/$17 (s0/s1), 2..7 select $2..$7.
 */
static int mmreg (int r)
{
    static const int reg_map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
/*
 * Used for 16-bit store instructions: same mapping as mmreg() except
 * that encoding 0 selects $0 (zero register) instead of $16.
 */
static int mmreg2 (int r)
{
    static const int reg_map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return reg_map[r];
}
/* Field extractors for 16-bit microMIPS opcodes: 3-bit register fields
 * (to be passed through mmreg()/mmreg2()) and raw 5-bit register fields
 * at the various bit positions used by the 16-bit formats. */
12537 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12538 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12539 #define uMIPS_RS2(op) uMIPS_RS(op)
12540 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12541 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12542 #define uMIPS_RS5(op) (op & 0x1f)
12544 /* Signed immediate */
12545 #define SIMM(op, start, width) \
12546 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12549 /* Zero-extended immediate */
12550 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12552 static void gen_addiur1sp(DisasContext
*ctx
)
12554 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12556 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12559 static void gen_addiur2(DisasContext
*ctx
)
12561 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12562 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12563 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12565 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12568 static void gen_addiusp(DisasContext
*ctx
)
12570 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12573 if (encoded
<= 1) {
12574 decoded
= 256 + encoded
;
12575 } else if (encoded
<= 255) {
12577 } else if (encoded
<= 509) {
12578 decoded
= encoded
- 512;
12580 decoded
= encoded
- 768;
12583 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12586 static void gen_addius5(DisasContext
*ctx
)
12588 int imm
= SIMM(ctx
->opcode
, 1, 4);
12589 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12591 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12594 static void gen_andi16(DisasContext
*ctx
)
12596 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12597 31, 32, 63, 64, 255, 32768, 65535 };
12598 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12599 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12600 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12602 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12605 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12606 int base
, int16_t offset
)
12611 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12612 generate_exception_end(ctx
, EXCP_RI
);
12616 t0
= tcg_temp_new();
12618 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12620 t1
= tcg_const_tl(reglist
);
12621 t2
= tcg_const_i32(ctx
->mem_idx
);
12623 save_cpu_state(ctx
, 1);
12626 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12629 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12631 #ifdef TARGET_MIPS64
12633 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12636 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12642 tcg_temp_free_i32(t2
);
/* Decode the pre-R6 POOL16C minor opcode space: 16-bit logical ops,
 * LWM/SWM register-list accesses, jumps (JR/JRC/JALR/JALRS), HI/LO
 * moves, BREAK/SDBBP and JRADDIUSP.  Dispatch is on bits 9..4 of the
 * 16-bit opcode.
 * NOTE(review): this text is mangled by extraction — the case labels,
 * break statements and braces appear to have been lost; verify against
 * upstream before editing. */
12646 static void gen_pool16c_insn(DisasContext
*ctx
)
12648 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12649 int rs
= mmreg(ctx
->opcode
& 0x7);
12651 switch (((ctx
->opcode
) >> 4) & 0x3f) {
/* NOR of rs into rd (bitwise NOT when rs is the only source) */
12656 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12662 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12668 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12674 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
/* LWM: load multiple, register-list encoding mapped through a table */
12681 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12682 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12684 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
/* SWM: store multiple, same list encoding as LWM above */
12693 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12694 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12696 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
/* JR: jump register, with a 16-bit delay slot */
12703 int reg
= ctx
->opcode
& 0x1f;
12705 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
/* JRC: compact jump register, no delay slot */
12711 int reg
= ctx
->opcode
& 0x1f;
12712 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12713 /* Let normal delay slot handling in our caller take us
12714 to the branch target. */
/* JALR: link in $31; delay slot size is enforced strictly */
12719 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12720 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
/* JALRS: same but with a 16-bit delay slot */
12724 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12725 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12729 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12733 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12736 generate_exception_end(ctx
, EXCP_BREAK
);
/* SDBBP: semihosting trap when UHI code matches, else debug exception */
12739 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12740 gen_helper_do_semihosting(cpu_env
);
12742 /* XXX: not clear which exception should be raised
12743 * when in debug mode...
12745 check_insn(ctx
, ISA_MIPS32
);
12746 generate_exception_end(ctx
, EXCP_DBp
);
/* JRADDIUSP: compact jump to $31 plus a stack-pointer adjustment */
12749 case JRADDIUSP
+ 0:
12750 case JRADDIUSP
+ 1:
12752 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12753 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12754 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12755 /* Let normal delay slot handling in our caller take us
12756 to the branch target. */
12760 generate_exception_end(ctx
, EXCP_RI
);
12765 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12768 int rd
, rs
, re
, rt
;
12769 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12770 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12771 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12772 rd
= rd_enc
[enc_dest
];
12773 re
= re_enc
[enc_dest
];
12774 rs
= rs_rt_enc
[enc_rs
];
12775 rt
= rs_rt_enc
[enc_rt
];
12777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12779 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12782 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12784 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
/* Decode the R6 POOL16C minor opcode space: logical ops, LWM16/SWM16,
 * compact jumps (JRC16/JRCADDIUSP), MOVEP and JALRC16/BREAK16/SDBBP16.
 * Dispatch is on bits 3..0 of the 16-bit opcode.
 * NOTE(review): this text is mangled by extraction — case labels,
 * breaks and braces appear lost; verify against upstream.
 * NOTE(review): this function calls generate_exception() where the
 * sibling decoders in this file use generate_exception_end() — confirm
 * whether that inconsistency is intentional. */
12788 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12790 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12791 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12793 switch (ctx
->opcode
& 0xf) {
12795 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12798 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
/* LWM16: load-multiple from $sp with scaled 4-bit offset */
12802 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12803 int offset
= extract32(ctx
->opcode
, 4, 4);
12804 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12807 case R6_JRC16
: /* JRCADDIUSP */
12808 if ((ctx
->opcode
>> 4) & 1) {
/* JRCADDIUSP: compact jump to $31 plus stack adjustment */
12810 int imm
= extract32(ctx
->opcode
, 5, 5);
12811 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12812 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
/* JRC16: compact jump register */
12815 int rs
= extract32(ctx
->opcode
, 5, 5);
12816 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12819 case MOVEP
... MOVEP_07
:
12820 case MOVEP_0C
... MOVEP_0F
:
12822 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12823 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12824 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12825 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12829 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12832 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
/* SWM16: store-multiple to $sp with scaled 4-bit offset */
12836 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12837 int offset
= extract32(ctx
->opcode
, 4, 4);
12838 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12841 case JALRC16
: /* BREAK16, SDBBP16 */
12842 switch (ctx
->opcode
& 0x3f) {
12844 case JALRC16
+ 0x20:
/* JALRC16: compact jump-and-link */
12846 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12851 generate_exception(ctx
, EXCP_BREAK
);
/* SDBBP16: semihosting trap when UHI code matches, else RI/DBp */
12855 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12856 gen_helper_do_semihosting(cpu_env
);
12858 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12859 generate_exception(ctx
, EXCP_RI
);
12861 generate_exception(ctx
, EXCP_DBp
);
12868 generate_exception(ctx
, EXCP_RI
);
12873 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
12875 TCGv t0
= tcg_temp_new();
12876 TCGv t1
= tcg_temp_new();
12878 gen_load_gpr(t0
, base
);
12881 gen_load_gpr(t1
, index
);
12882 tcg_gen_shli_tl(t1
, t1
, 2);
12883 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12886 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12887 gen_store_gpr(t1
, rd
);
/* LWP/SWP (and LDP/SDP on MIPS64): load or store the register pair
 * rd/rd+1 at base+offset / base+offset+4 (or +8 for doublewords).
 * Refused (RI) in a branch delay slot or when rd == 31, since rd+1
 * would be out of range.
 * NOTE(review): this text is mangled by extraction — case labels,
 * breaks, temp declarations/frees and braces appear lost; verify
 * against upstream before editing. */
12893 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12894 int base
, int16_t offset
)
12898 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12899 generate_exception_end(ctx
, EXCP_RI
);
12903 t0
= tcg_temp_new();
12904 t1
= tcg_temp_new();
12906 gen_base_offset_addr(ctx
, t0
, base
, offset
);
/* LWP with rd == base would clobber the address register mid-sequence */
12911 generate_exception_end(ctx
, EXCP_RI
);
/* LWP: two sign-extended word loads into rd and rd+1 */
12914 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12915 gen_store_gpr(t1
, rd
);
12916 tcg_gen_movi_tl(t1
, 4);
12917 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12918 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12919 gen_store_gpr(t1
, rd
+1);
/* SWP: two word stores from rd and rd+1 */
12922 gen_load_gpr(t1
, rd
);
12923 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12924 tcg_gen_movi_tl(t1
, 4);
12925 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12926 gen_load_gpr(t1
, rd
+1);
12927 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12929 #ifdef TARGET_MIPS64
/* LDP: two doubleword loads into rd and rd+1 */
12932 generate_exception_end(ctx
, EXCP_RI
);
12935 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12936 gen_store_gpr(t1
, rd
);
12937 tcg_gen_movi_tl(t1
, 8);
12938 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12939 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12940 gen_store_gpr(t1
, rd
+1);
/* SDP: two doubleword stores from rd and rd+1 */
12943 gen_load_gpr(t1
, rd
);
12944 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12945 tcg_gen_movi_tl(t1
, 8);
12946 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12947 gen_load_gpr(t1
, rd
+1);
12948 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
/* Decode the POOL32Axf minor opcode extension space of microMIPS:
 * traps, CP0 moves, DSP-style multiply/accumulate, bit-field ops,
 * JALR variants, SRS moves, CP0 management (TLB/WAIT/ERET/DERET),
 * DI/EI, SYSCALL/SDBBP and HI/LO accumulator moves.  Dispatch is on
 * bits 11..6 ("extension"), with sub-dispatch on bits 15..12 ("minor").
 * NOTE(review): this text is mangled by extraction — case labels,
 * breaks, braces and some statements appear lost; verify against
 * upstream before editing. */
12956 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12958 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12959 int minor
= (ctx
->opcode
>> 12) & 0xf;
12960 uint32_t mips32_op
;
12962 switch (extension
) {
/* Conditional traps: TEQ/TGE/TGEU/TLT/TLTU/TNE */
12964 mips32_op
= OPC_TEQ
;
12967 mips32_op
= OPC_TGE
;
12970 mips32_op
= OPC_TGEU
;
12973 mips32_op
= OPC_TLT
;
12976 mips32_op
= OPC_TLTU
;
12979 mips32_op
= OPC_TNE
;
12981 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
/* CP0 moves (system mode only) */
12983 #ifndef CONFIG_USER_ONLY
12986 check_cp0_enabled(ctx
);
12988 /* Treat as NOP. */
12991 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12995 check_cp0_enabled(ctx
);
12997 TCGv t0
= tcg_temp_new();
12999 gen_load_gpr(t0
, rt
);
13000 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
/* Multiply/accumulate into a selected accumulator (bits 15..14) */
13006 switch (minor
& 3) {
13008 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13011 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13014 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13017 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13020 goto pool32axf_invalid
;
13024 switch (minor
& 3) {
13026 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13029 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13032 goto pool32axf_invalid
;
/* Bit-manipulation group: BITSWAP/SEB/SEH/CLO/CLZ/RDHWR/WSBH */
13038 check_insn(ctx
, ISA_MIPS32R6
);
13039 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13042 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13045 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13048 mips32_op
= OPC_CLO
;
13051 mips32_op
= OPC_CLZ
;
13053 check_insn(ctx
, ISA_MIPS32
);
13054 gen_cl(ctx
, mips32_op
, rt
, rs
);
13057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13058 gen_rdhwr(ctx
, rt
, rs
, 0);
13061 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
/* Pre-R6 MULT/MULTU/DIV/DIVU and MADD/MADDU/MSUB/MSUBU on acc 0 */
13064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13065 mips32_op
= OPC_MULT
;
13068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13069 mips32_op
= OPC_MULTU
;
13072 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13073 mips32_op
= OPC_DIV
;
13076 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13077 mips32_op
= OPC_DIVU
;
13080 check_insn(ctx
, ISA_MIPS32
);
13081 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13085 mips32_op
= OPC_MADD
;
13088 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13089 mips32_op
= OPC_MADDU
;
13092 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13093 mips32_op
= OPC_MSUB
;
13096 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13097 mips32_op
= OPC_MSUBU
;
13099 check_insn(ctx
, ISA_MIPS32
);
13100 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13103 goto pool32axf_invalid
;
/* COP2 is not implemented: raise a coprocessor-unusable error */
13114 generate_exception_err(ctx
, EXCP_CpU
, 2);
13117 goto pool32axf_invalid
;
/* JALR family; R6 treats these as compact (JALRC) */
13122 case JALR
: /* JALRC */
13123 case JALR_HB
: /* JALRC_HB */
13124 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13125 /* JALRC, JALRC_HB */
13126 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13128 /* JALR, JALR_HB */
13129 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13130 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13135 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13136 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13137 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13140 goto pool32axf_invalid
;
/* Shadow register set moves (R2+, CP0 required) */
13146 check_cp0_enabled(ctx
);
13147 check_insn(ctx
, ISA_MIPS32R2
);
13148 gen_load_srsgpr(rs
, rt
);
13151 check_cp0_enabled(ctx
);
13152 check_insn(ctx
, ISA_MIPS32R2
);
13153 gen_store_srsgpr(rs
, rt
);
13156 goto pool32axf_invalid
;
/* CP0 management: TLB ops, WAIT, DERET, ERET (system mode only) */
13159 #ifndef CONFIG_USER_ONLY
13163 mips32_op
= OPC_TLBP
;
13166 mips32_op
= OPC_TLBR
;
13169 mips32_op
= OPC_TLBWI
;
13172 mips32_op
= OPC_TLBWR
;
13175 mips32_op
= OPC_TLBINV
;
13178 mips32_op
= OPC_TLBINVF
;
13181 mips32_op
= OPC_WAIT
;
13184 mips32_op
= OPC_DERET
;
13187 mips32_op
= OPC_ERET
;
13189 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13192 goto pool32axf_invalid
;
/* DI/EI: interrupt disable/enable via helpers */
13198 check_cp0_enabled(ctx
);
13200 TCGv t0
= tcg_temp_new();
13202 save_cpu_state(ctx
, 1);
13203 gen_helper_di(t0
, cpu_env
);
13204 gen_store_gpr(t0
, rs
);
13205 /* Stop translation as we may have switched the execution mode */
13206 ctx
->bstate
= BS_STOP
;
13211 check_cp0_enabled(ctx
);
13213 TCGv t0
= tcg_temp_new();
13215 save_cpu_state(ctx
, 1);
13216 gen_helper_ei(t0
, cpu_env
);
13217 gen_store_gpr(t0
, rs
);
13218 /* Stop translation as we may have switched the execution mode */
13219 ctx
->bstate
= BS_STOP
;
13224 goto pool32axf_invalid
;
/* SYSCALL and SDBBP (semihosting trap when UHI code matches) */
13234 generate_exception_end(ctx
, EXCP_SYSCALL
);
13237 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13238 gen_helper_do_semihosting(cpu_env
);
13240 check_insn(ctx
, ISA_MIPS32
);
13241 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13242 generate_exception_end(ctx
, EXCP_RI
);
13244 generate_exception_end(ctx
, EXCP_DBp
);
13249 goto pool32axf_invalid
;
/* HI/LO moves on a selected DSP accumulator (minor >> 2) */
13253 switch (minor
& 3) {
13255 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13258 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13261 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13264 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13267 goto pool32axf_invalid
;
/* Pre-R6 HI/LO moves on accumulator 0 */
13271 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13274 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13277 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13280 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13283 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13286 goto pool32axf_invalid
;
13291 MIPS_INVAL("pool32axf");
13292 generate_exception_end(ctx
, EXCP_RI
);
13297 /* Values for microMIPS fmt field. Variable-width, depending on which
13298 formats the instruction supports. */
13317 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13319 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13320 uint32_t mips32_op
;
13322 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13323 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13324 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13326 switch (extension
) {
13327 case FLOAT_1BIT_FMT(CFC1
, 0):
13328 mips32_op
= OPC_CFC1
;
13330 case FLOAT_1BIT_FMT(CTC1
, 0):
13331 mips32_op
= OPC_CTC1
;
13333 case FLOAT_1BIT_FMT(MFC1
, 0):
13334 mips32_op
= OPC_MFC1
;
13336 case FLOAT_1BIT_FMT(MTC1
, 0):
13337 mips32_op
= OPC_MTC1
;
13339 case FLOAT_1BIT_FMT(MFHC1
, 0):
13340 mips32_op
= OPC_MFHC1
;
13342 case FLOAT_1BIT_FMT(MTHC1
, 0):
13343 mips32_op
= OPC_MTHC1
;
13345 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13348 /* Reciprocal square root */
13349 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13350 mips32_op
= OPC_RSQRT_S
;
13352 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13353 mips32_op
= OPC_RSQRT_D
;
13357 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13358 mips32_op
= OPC_SQRT_S
;
13360 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13361 mips32_op
= OPC_SQRT_D
;
13365 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13366 mips32_op
= OPC_RECIP_S
;
13368 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13369 mips32_op
= OPC_RECIP_D
;
13373 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13374 mips32_op
= OPC_FLOOR_L_S
;
13376 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13377 mips32_op
= OPC_FLOOR_L_D
;
13379 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13380 mips32_op
= OPC_FLOOR_W_S
;
13382 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13383 mips32_op
= OPC_FLOOR_W_D
;
13387 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13388 mips32_op
= OPC_CEIL_L_S
;
13390 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13391 mips32_op
= OPC_CEIL_L_D
;
13393 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13394 mips32_op
= OPC_CEIL_W_S
;
13396 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13397 mips32_op
= OPC_CEIL_W_D
;
13401 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13402 mips32_op
= OPC_TRUNC_L_S
;
13404 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13405 mips32_op
= OPC_TRUNC_L_D
;
13407 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13408 mips32_op
= OPC_TRUNC_W_S
;
13410 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13411 mips32_op
= OPC_TRUNC_W_D
;
13415 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13416 mips32_op
= OPC_ROUND_L_S
;
13418 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13419 mips32_op
= OPC_ROUND_L_D
;
13421 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13422 mips32_op
= OPC_ROUND_W_S
;
13424 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13425 mips32_op
= OPC_ROUND_W_D
;
13428 /* Integer to floating-point conversion */
13429 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13430 mips32_op
= OPC_CVT_L_S
;
13432 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13433 mips32_op
= OPC_CVT_L_D
;
13435 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13436 mips32_op
= OPC_CVT_W_S
;
13438 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13439 mips32_op
= OPC_CVT_W_D
;
13442 /* Paired-foo conversions */
13443 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13444 mips32_op
= OPC_CVT_S_PL
;
13446 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13447 mips32_op
= OPC_CVT_S_PU
;
13449 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13450 mips32_op
= OPC_CVT_PW_PS
;
13452 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13453 mips32_op
= OPC_CVT_PS_PW
;
13456 /* Floating-point moves */
13457 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13458 mips32_op
= OPC_MOV_S
;
13460 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13461 mips32_op
= OPC_MOV_D
;
13463 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13464 mips32_op
= OPC_MOV_PS
;
13467 /* Absolute value */
13468 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13469 mips32_op
= OPC_ABS_S
;
13471 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13472 mips32_op
= OPC_ABS_D
;
13474 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13475 mips32_op
= OPC_ABS_PS
;
13479 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13480 mips32_op
= OPC_NEG_S
;
13482 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13483 mips32_op
= OPC_NEG_D
;
13485 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13486 mips32_op
= OPC_NEG_PS
;
13489 /* Reciprocal square root step */
13490 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13491 mips32_op
= OPC_RSQRT1_S
;
13493 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13494 mips32_op
= OPC_RSQRT1_D
;
13496 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13497 mips32_op
= OPC_RSQRT1_PS
;
13500 /* Reciprocal step */
13501 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13502 mips32_op
= OPC_RECIP1_S
;
13504 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13505 mips32_op
= OPC_RECIP1_S
;
13507 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13508 mips32_op
= OPC_RECIP1_PS
;
13511 /* Conversions from double */
13512 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13513 mips32_op
= OPC_CVT_D_S
;
13515 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13516 mips32_op
= OPC_CVT_D_W
;
13518 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13519 mips32_op
= OPC_CVT_D_L
;
13522 /* Conversions from single */
13523 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13524 mips32_op
= OPC_CVT_S_D
;
13526 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13527 mips32_op
= OPC_CVT_S_W
;
13529 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13530 mips32_op
= OPC_CVT_S_L
;
13532 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13535 /* Conditional moves on floating-point codes */
13536 case COND_FLOAT_MOV(MOVT
, 0):
13537 case COND_FLOAT_MOV(MOVT
, 1):
13538 case COND_FLOAT_MOV(MOVT
, 2):
13539 case COND_FLOAT_MOV(MOVT
, 3):
13540 case COND_FLOAT_MOV(MOVT
, 4):
13541 case COND_FLOAT_MOV(MOVT
, 5):
13542 case COND_FLOAT_MOV(MOVT
, 6):
13543 case COND_FLOAT_MOV(MOVT
, 7):
13544 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13545 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13547 case COND_FLOAT_MOV(MOVF
, 0):
13548 case COND_FLOAT_MOV(MOVF
, 1):
13549 case COND_FLOAT_MOV(MOVF
, 2):
13550 case COND_FLOAT_MOV(MOVF
, 3):
13551 case COND_FLOAT_MOV(MOVF
, 4):
13552 case COND_FLOAT_MOV(MOVF
, 5):
13553 case COND_FLOAT_MOV(MOVF
, 6):
13554 case COND_FLOAT_MOV(MOVF
, 7):
13555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13556 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13559 MIPS_INVAL("pool32fxf");
13560 generate_exception_end(ctx
, EXCP_RI
);
13565 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13569 int rt
, rs
, rd
, rr
;
13571 uint32_t op
, minor
, mips32_op
;
13572 uint32_t cond
, fmt
, cc
;
13574 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13575 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13577 rt
= (ctx
->opcode
>> 21) & 0x1f;
13578 rs
= (ctx
->opcode
>> 16) & 0x1f;
13579 rd
= (ctx
->opcode
>> 11) & 0x1f;
13580 rr
= (ctx
->opcode
>> 6) & 0x1f;
13581 imm
= (int16_t) ctx
->opcode
;
13583 op
= (ctx
->opcode
>> 26) & 0x3f;
13586 minor
= ctx
->opcode
& 0x3f;
13589 minor
= (ctx
->opcode
>> 6) & 0xf;
13592 mips32_op
= OPC_SLL
;
13595 mips32_op
= OPC_SRA
;
13598 mips32_op
= OPC_SRL
;
13601 mips32_op
= OPC_ROTR
;
13603 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13606 check_insn(ctx
, ISA_MIPS32R6
);
13607 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13610 check_insn(ctx
, ISA_MIPS32R6
);
13611 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13614 check_insn(ctx
, ISA_MIPS32R6
);
13615 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13618 goto pool32a_invalid
;
13622 minor
= (ctx
->opcode
>> 6) & 0xf;
13626 mips32_op
= OPC_ADD
;
13629 mips32_op
= OPC_ADDU
;
13632 mips32_op
= OPC_SUB
;
13635 mips32_op
= OPC_SUBU
;
13638 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13639 mips32_op
= OPC_MUL
;
13641 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13645 mips32_op
= OPC_SLLV
;
13648 mips32_op
= OPC_SRLV
;
13651 mips32_op
= OPC_SRAV
;
13654 mips32_op
= OPC_ROTRV
;
13656 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13658 /* Logical operations */
13660 mips32_op
= OPC_AND
;
13663 mips32_op
= OPC_OR
;
13666 mips32_op
= OPC_NOR
;
13669 mips32_op
= OPC_XOR
;
13671 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13673 /* Set less than */
13675 mips32_op
= OPC_SLT
;
13678 mips32_op
= OPC_SLTU
;
13680 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13683 goto pool32a_invalid
;
13687 minor
= (ctx
->opcode
>> 6) & 0xf;
13689 /* Conditional moves */
13690 case MOVN
: /* MUL */
13691 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13693 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13696 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13699 case MOVZ
: /* MUH */
13700 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13702 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13705 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13709 check_insn(ctx
, ISA_MIPS32R6
);
13710 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13713 check_insn(ctx
, ISA_MIPS32R6
);
13714 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13716 case LWXS
: /* DIV */
13717 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13719 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13722 gen_ldxs(ctx
, rs
, rt
, rd
);
13726 check_insn(ctx
, ISA_MIPS32R6
);
13727 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13730 check_insn(ctx
, ISA_MIPS32R6
);
13731 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13734 check_insn(ctx
, ISA_MIPS32R6
);
13735 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13738 goto pool32a_invalid
;
13742 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13745 check_insn(ctx
, ISA_MIPS32R6
);
13746 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13747 extract32(ctx
->opcode
, 9, 2));
13750 check_insn(ctx
, ISA_MIPS32R6
);
13751 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13752 extract32(ctx
->opcode
, 9, 2));
13755 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13758 gen_pool32axf(env
, ctx
, rt
, rs
);
13761 generate_exception_end(ctx
, EXCP_BREAK
);
13764 check_insn(ctx
, ISA_MIPS32R6
);
13765 generate_exception_end(ctx
, EXCP_RI
);
13769 MIPS_INVAL("pool32a");
13770 generate_exception_end(ctx
, EXCP_RI
);
13775 minor
= (ctx
->opcode
>> 12) & 0xf;
13778 check_cp0_enabled(ctx
);
13779 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13780 gen_cache_operation(ctx
, rt
, rs
, imm
);
13785 /* COP2: Not implemented. */
13786 generate_exception_err(ctx
, EXCP_CpU
, 2);
13788 #ifdef TARGET_MIPS64
13791 check_insn(ctx
, ISA_MIPS3
);
13792 check_mips_64(ctx
);
13797 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13799 #ifdef TARGET_MIPS64
13802 check_insn(ctx
, ISA_MIPS3
);
13803 check_mips_64(ctx
);
13808 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13811 MIPS_INVAL("pool32b");
13812 generate_exception_end(ctx
, EXCP_RI
);
13817 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13818 minor
= ctx
->opcode
& 0x3f;
13819 check_cp1_enabled(ctx
);
13822 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13823 mips32_op
= OPC_ALNV_PS
;
13826 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13827 mips32_op
= OPC_MADD_S
;
13830 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13831 mips32_op
= OPC_MADD_D
;
13834 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13835 mips32_op
= OPC_MADD_PS
;
13838 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13839 mips32_op
= OPC_MSUB_S
;
13842 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13843 mips32_op
= OPC_MSUB_D
;
13846 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13847 mips32_op
= OPC_MSUB_PS
;
13850 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13851 mips32_op
= OPC_NMADD_S
;
13854 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13855 mips32_op
= OPC_NMADD_D
;
13858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13859 mips32_op
= OPC_NMADD_PS
;
13862 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13863 mips32_op
= OPC_NMSUB_S
;
13866 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13867 mips32_op
= OPC_NMSUB_D
;
13870 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13871 mips32_op
= OPC_NMSUB_PS
;
13873 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13875 case CABS_COND_FMT
:
13876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13877 cond
= (ctx
->opcode
>> 6) & 0xf;
13878 cc
= (ctx
->opcode
>> 13) & 0x7;
13879 fmt
= (ctx
->opcode
>> 10) & 0x3;
13882 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13885 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13888 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13891 goto pool32f_invalid
;
13895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13896 cond
= (ctx
->opcode
>> 6) & 0xf;
13897 cc
= (ctx
->opcode
>> 13) & 0x7;
13898 fmt
= (ctx
->opcode
>> 10) & 0x3;
13901 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13904 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13907 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13910 goto pool32f_invalid
;
13914 check_insn(ctx
, ISA_MIPS32R6
);
13915 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13918 check_insn(ctx
, ISA_MIPS32R6
);
13919 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13922 gen_pool32fxf(ctx
, rt
, rs
);
13926 switch ((ctx
->opcode
>> 6) & 0x7) {
13928 mips32_op
= OPC_PLL_PS
;
13931 mips32_op
= OPC_PLU_PS
;
13934 mips32_op
= OPC_PUL_PS
;
13937 mips32_op
= OPC_PUU_PS
;
13940 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13941 mips32_op
= OPC_CVT_PS_S
;
13943 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13946 goto pool32f_invalid
;
13950 check_insn(ctx
, ISA_MIPS32R6
);
13951 switch ((ctx
->opcode
>> 9) & 0x3) {
13953 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13956 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13959 goto pool32f_invalid
;
13964 switch ((ctx
->opcode
>> 6) & 0x7) {
13966 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13967 mips32_op
= OPC_LWXC1
;
13970 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13971 mips32_op
= OPC_SWXC1
;
13974 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13975 mips32_op
= OPC_LDXC1
;
13978 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13979 mips32_op
= OPC_SDXC1
;
13982 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13983 mips32_op
= OPC_LUXC1
;
13986 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13987 mips32_op
= OPC_SUXC1
;
13989 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13992 goto pool32f_invalid
;
13996 check_insn(ctx
, ISA_MIPS32R6
);
13997 switch ((ctx
->opcode
>> 9) & 0x3) {
13999 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14002 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14005 goto pool32f_invalid
;
14010 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14011 fmt
= (ctx
->opcode
>> 9) & 0x3;
14012 switch ((ctx
->opcode
>> 6) & 0x7) {
14016 mips32_op
= OPC_RSQRT2_S
;
14019 mips32_op
= OPC_RSQRT2_D
;
14022 mips32_op
= OPC_RSQRT2_PS
;
14025 goto pool32f_invalid
;
14031 mips32_op
= OPC_RECIP2_S
;
14034 mips32_op
= OPC_RECIP2_D
;
14037 mips32_op
= OPC_RECIP2_PS
;
14040 goto pool32f_invalid
;
14044 mips32_op
= OPC_ADDR_PS
;
14047 mips32_op
= OPC_MULR_PS
;
14049 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14052 goto pool32f_invalid
;
14056 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14057 cc
= (ctx
->opcode
>> 13) & 0x7;
14058 fmt
= (ctx
->opcode
>> 9) & 0x3;
14059 switch ((ctx
->opcode
>> 6) & 0x7) {
14060 case MOVF_FMT
: /* RINT_FMT */
14061 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14065 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14068 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14071 goto pool32f_invalid
;
14077 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14080 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14084 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14087 goto pool32f_invalid
;
14091 case MOVT_FMT
: /* CLASS_FMT */
14092 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14096 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14099 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14102 goto pool32f_invalid
;
14108 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14111 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14115 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14118 goto pool32f_invalid
;
14123 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14126 goto pool32f_invalid
;
14129 #define FINSN_3ARG_SDPS(prfx) \
14130 switch ((ctx->opcode >> 8) & 0x3) { \
14132 mips32_op = OPC_##prfx##_S; \
14135 mips32_op = OPC_##prfx##_D; \
14137 case FMT_SDPS_PS: \
14139 mips32_op = OPC_##prfx##_PS; \
14142 goto pool32f_invalid; \
14145 check_insn(ctx
, ISA_MIPS32R6
);
14146 switch ((ctx
->opcode
>> 9) & 0x3) {
14148 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14151 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14154 goto pool32f_invalid
;
14158 check_insn(ctx
, ISA_MIPS32R6
);
14159 switch ((ctx
->opcode
>> 9) & 0x3) {
14161 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14164 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14167 goto pool32f_invalid
;
14171 /* regular FP ops */
14172 switch ((ctx
->opcode
>> 6) & 0x3) {
14174 FINSN_3ARG_SDPS(ADD
);
14177 FINSN_3ARG_SDPS(SUB
);
14180 FINSN_3ARG_SDPS(MUL
);
14183 fmt
= (ctx
->opcode
>> 8) & 0x3;
14185 mips32_op
= OPC_DIV_D
;
14186 } else if (fmt
== 0) {
14187 mips32_op
= OPC_DIV_S
;
14189 goto pool32f_invalid
;
14193 goto pool32f_invalid
;
14198 switch ((ctx
->opcode
>> 6) & 0x7) {
14199 case MOVN_FMT
: /* SELNEZ_FMT */
14200 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14202 switch ((ctx
->opcode
>> 9) & 0x3) {
14204 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14207 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14210 goto pool32f_invalid
;
14214 FINSN_3ARG_SDPS(MOVN
);
14218 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14219 FINSN_3ARG_SDPS(MOVN
);
14221 case MOVZ_FMT
: /* SELEQZ_FMT */
14222 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14224 switch ((ctx
->opcode
>> 9) & 0x3) {
14226 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14229 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14232 goto pool32f_invalid
;
14236 FINSN_3ARG_SDPS(MOVZ
);
14240 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14241 FINSN_3ARG_SDPS(MOVZ
);
14244 check_insn(ctx
, ISA_MIPS32R6
);
14245 switch ((ctx
->opcode
>> 9) & 0x3) {
14247 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14250 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14253 goto pool32f_invalid
;
14257 check_insn(ctx
, ISA_MIPS32R6
);
14258 switch ((ctx
->opcode
>> 9) & 0x3) {
14260 mips32_op
= OPC_MADDF_S
;
14263 mips32_op
= OPC_MADDF_D
;
14266 goto pool32f_invalid
;
14270 check_insn(ctx
, ISA_MIPS32R6
);
14271 switch ((ctx
->opcode
>> 9) & 0x3) {
14273 mips32_op
= OPC_MSUBF_S
;
14276 mips32_op
= OPC_MSUBF_D
;
14279 goto pool32f_invalid
;
14283 goto pool32f_invalid
;
14287 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14291 MIPS_INVAL("pool32f");
14292 generate_exception_end(ctx
, EXCP_RI
);
14296 generate_exception_err(ctx
, EXCP_CpU
, 1);
14300 minor
= (ctx
->opcode
>> 21) & 0x1f;
14303 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14304 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14307 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14308 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14309 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14313 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14314 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14317 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14318 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14321 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14322 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14323 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14326 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14327 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14328 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14331 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14332 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14335 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14336 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14340 case TLTI
: /* BC1EQZC */
14341 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14343 check_cp1_enabled(ctx
);
14344 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14347 mips32_op
= OPC_TLTI
;
14351 case TGEI
: /* BC1NEZC */
14352 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14354 check_cp1_enabled(ctx
);
14355 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14358 mips32_op
= OPC_TGEI
;
14363 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14364 mips32_op
= OPC_TLTIU
;
14367 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14368 mips32_op
= OPC_TGEIU
;
14370 case TNEI
: /* SYNCI */
14371 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14373 /* Break the TB to be able to sync copied instructions
14375 ctx
->bstate
= BS_STOP
;
14378 mips32_op
= OPC_TNEI
;
14383 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14384 mips32_op
= OPC_TEQI
;
14386 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14391 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14392 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14393 4, rs
, 0, imm
<< 1, 0);
14394 /* Compact branches don't have a delay slot, so just let
14395 the normal delay slot handling take us to the branch
14399 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14400 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14403 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14404 /* Break the TB to be able to sync copied instructions
14406 ctx
->bstate
= BS_STOP
;
14410 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14411 /* COP2: Not implemented. */
14412 generate_exception_err(ctx
, EXCP_CpU
, 2);
14415 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14416 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14420 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14423 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14424 mips32_op
= OPC_BC1FANY4
;
14427 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14428 mips32_op
= OPC_BC1TANY4
;
14431 check_insn(ctx
, ASE_MIPS3D
);
14434 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14435 check_cp1_enabled(ctx
);
14436 gen_compute_branch1(ctx
, mips32_op
,
14437 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14439 generate_exception_err(ctx
, EXCP_CpU
, 1);
14444 /* MIPS DSP: not implemented */
14447 MIPS_INVAL("pool32i");
14448 generate_exception_end(ctx
, EXCP_RI
);
14453 minor
= (ctx
->opcode
>> 12) & 0xf;
14454 offset
= sextract32(ctx
->opcode
, 0,
14455 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14458 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14459 mips32_op
= OPC_LWL
;
14462 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14463 mips32_op
= OPC_SWL
;
14466 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14467 mips32_op
= OPC_LWR
;
14470 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14471 mips32_op
= OPC_SWR
;
14473 #if defined(TARGET_MIPS64)
14475 check_insn(ctx
, ISA_MIPS3
);
14476 check_mips_64(ctx
);
14477 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14478 mips32_op
= OPC_LDL
;
14481 check_insn(ctx
, ISA_MIPS3
);
14482 check_mips_64(ctx
);
14483 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14484 mips32_op
= OPC_SDL
;
14487 check_insn(ctx
, ISA_MIPS3
);
14488 check_mips_64(ctx
);
14489 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14490 mips32_op
= OPC_LDR
;
14493 check_insn(ctx
, ISA_MIPS3
);
14494 check_mips_64(ctx
);
14495 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14496 mips32_op
= OPC_SDR
;
14499 check_insn(ctx
, ISA_MIPS3
);
14500 check_mips_64(ctx
);
14501 mips32_op
= OPC_LWU
;
14504 check_insn(ctx
, ISA_MIPS3
);
14505 check_mips_64(ctx
);
14506 mips32_op
= OPC_LLD
;
14510 mips32_op
= OPC_LL
;
14513 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14516 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14519 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14521 #if defined(TARGET_MIPS64)
14523 check_insn(ctx
, ISA_MIPS3
);
14524 check_mips_64(ctx
);
14525 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14529 /* Treat as no-op */
14530 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14531 /* hint codes 24-31 are reserved and signal RI */
14532 generate_exception(ctx
, EXCP_RI
);
14536 MIPS_INVAL("pool32c");
14537 generate_exception_end(ctx
, EXCP_RI
);
14541 case ADDI32
: /* AUI, LUI */
14542 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14544 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14547 mips32_op
= OPC_ADDI
;
14552 mips32_op
= OPC_ADDIU
;
14554 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14557 /* Logical operations */
14559 mips32_op
= OPC_ORI
;
14562 mips32_op
= OPC_XORI
;
14565 mips32_op
= OPC_ANDI
;
14567 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14570 /* Set less than immediate */
14572 mips32_op
= OPC_SLTI
;
14575 mips32_op
= OPC_SLTIU
;
14577 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14580 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14581 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14582 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14583 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14585 case JALS32
: /* BOVC, BEQC, BEQZALC */
14586 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14589 mips32_op
= OPC_BOVC
;
14590 } else if (rs
< rt
&& rs
== 0) {
14592 mips32_op
= OPC_BEQZALC
;
14595 mips32_op
= OPC_BEQC
;
14597 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14600 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14601 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14602 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14605 case BEQ32
: /* BC */
14606 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14608 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14609 sextract32(ctx
->opcode
<< 1, 0, 27));
14612 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14615 case BNE32
: /* BALC */
14616 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14618 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14619 sextract32(ctx
->opcode
<< 1, 0, 27));
14622 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14625 case J32
: /* BGTZC, BLTZC, BLTC */
14626 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14627 if (rs
== 0 && rt
!= 0) {
14629 mips32_op
= OPC_BGTZC
;
14630 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14632 mips32_op
= OPC_BLTZC
;
14635 mips32_op
= OPC_BLTC
;
14637 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14640 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14641 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14644 case JAL32
: /* BLEZC, BGEZC, BGEC */
14645 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14646 if (rs
== 0 && rt
!= 0) {
14648 mips32_op
= OPC_BLEZC
;
14649 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14651 mips32_op
= OPC_BGEZC
;
14654 mips32_op
= OPC_BGEC
;
14656 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14659 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14660 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14661 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14664 /* Floating point (COP1) */
14666 mips32_op
= OPC_LWC1
;
14669 mips32_op
= OPC_LDC1
;
14672 mips32_op
= OPC_SWC1
;
14675 mips32_op
= OPC_SDC1
;
14677 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14679 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14680 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14681 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14682 switch ((ctx
->opcode
>> 16) & 0x1f) {
14683 case ADDIUPC_00
... ADDIUPC_07
:
14684 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14687 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14690 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14692 case LWPC_08
... LWPC_0F
:
14693 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14696 generate_exception(ctx
, EXCP_RI
);
14701 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14702 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14704 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14707 case BNVC
: /* BNEC, BNEZALC */
14708 check_insn(ctx
, ISA_MIPS32R6
);
14711 mips32_op
= OPC_BNVC
;
14712 } else if (rs
< rt
&& rs
== 0) {
14714 mips32_op
= OPC_BNEZALC
;
14717 mips32_op
= OPC_BNEC
;
14719 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14721 case R6_BNEZC
: /* JIALC */
14722 check_insn(ctx
, ISA_MIPS32R6
);
14725 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14726 sextract32(ctx
->opcode
<< 1, 0, 22));
14729 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14732 case R6_BEQZC
: /* JIC */
14733 check_insn(ctx
, ISA_MIPS32R6
);
14736 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14737 sextract32(ctx
->opcode
<< 1, 0, 22));
14740 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14743 case BLEZALC
: /* BGEZALC, BGEUC */
14744 check_insn(ctx
, ISA_MIPS32R6
);
14745 if (rs
== 0 && rt
!= 0) {
14747 mips32_op
= OPC_BLEZALC
;
14748 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14750 mips32_op
= OPC_BGEZALC
;
14753 mips32_op
= OPC_BGEUC
;
14755 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14757 case BGTZALC
: /* BLTZALC, BLTUC */
14758 check_insn(ctx
, ISA_MIPS32R6
);
14759 if (rs
== 0 && rt
!= 0) {
14761 mips32_op
= OPC_BGTZALC
;
14762 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14764 mips32_op
= OPC_BLTZALC
;
14767 mips32_op
= OPC_BLTUC
;
14769 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14771 /* Loads and stores */
14773 mips32_op
= OPC_LB
;
14776 mips32_op
= OPC_LBU
;
14779 mips32_op
= OPC_LH
;
14782 mips32_op
= OPC_LHU
;
14785 mips32_op
= OPC_LW
;
14787 #ifdef TARGET_MIPS64
14789 check_insn(ctx
, ISA_MIPS3
);
14790 check_mips_64(ctx
);
14791 mips32_op
= OPC_LD
;
14794 check_insn(ctx
, ISA_MIPS3
);
14795 check_mips_64(ctx
);
14796 mips32_op
= OPC_SD
;
14800 mips32_op
= OPC_SB
;
14803 mips32_op
= OPC_SH
;
14806 mips32_op
= OPC_SW
;
14809 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14812 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14815 generate_exception_end(ctx
, EXCP_RI
);
14820 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
14824 /* make sure instructions are on a halfword boundary */
14825 if (ctx
->pc
& 0x1) {
14826 env
->CP0_BadVAddr
= ctx
->pc
;
14827 generate_exception_end(ctx
, EXCP_AdEL
);
14831 op
= (ctx
->opcode
>> 10) & 0x3f;
14832 /* Enforce properly-sized instructions in a delay slot */
14833 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
14834 switch (op
& 0x7) { /* MSB-3..MSB-5 */
14836 /* POOL32A, POOL32B, POOL32I, POOL32C */
14838 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
14840 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
14842 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
14844 /* LB32, LH32, LWC132, LDC132, LW32 */
14845 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
14846 generate_exception_end(ctx
, EXCP_RI
);
14851 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
14853 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
14855 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
14856 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
14857 generate_exception_end(ctx
, EXCP_RI
);
14867 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14868 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14869 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14872 switch (ctx
->opcode
& 0x1) {
14880 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14881 /* In the Release 6 the register number location in
14882 * the instruction encoding has changed.
14884 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14886 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14892 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14893 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14894 int amount
= (ctx
->opcode
>> 1) & 0x7;
14896 amount
= amount
== 0 ? 8 : amount
;
14898 switch (ctx
->opcode
& 0x1) {
14907 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14911 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14912 gen_pool16c_r6_insn(ctx
);
14914 gen_pool16c_insn(ctx
);
14919 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14920 int rb
= 28; /* GP */
14921 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14923 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14928 if (ctx
->opcode
& 1) {
14929 generate_exception_end(ctx
, EXCP_RI
);
14932 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14933 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14934 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14935 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14940 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14941 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14942 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14943 offset
= (offset
== 0xf ? -1 : offset
);
14945 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14950 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14951 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14952 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14954 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14959 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14960 int rb
= 29; /* SP */
14961 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14963 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14968 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14969 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14970 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14972 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14977 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14978 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14979 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14981 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14986 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14987 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14988 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14990 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14995 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14996 int rb
= 29; /* SP */
14997 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14999 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15004 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15005 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15006 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15008 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15013 int rd
= uMIPS_RD5(ctx
->opcode
);
15014 int rs
= uMIPS_RS5(ctx
->opcode
);
15016 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15023 switch (ctx
->opcode
& 0x1) {
15033 switch (ctx
->opcode
& 0x1) {
15038 gen_addiur1sp(ctx
);
15042 case B16
: /* BC16 */
15043 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15044 sextract32(ctx
->opcode
, 0, 10) << 1,
15045 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15047 case BNEZ16
: /* BNEZC16 */
15048 case BEQZ16
: /* BEQZC16 */
15049 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15050 mmreg(uMIPS_RD(ctx
->opcode
)),
15051 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15052 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15057 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15058 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15060 imm
= (imm
== 0x7f ? -1 : imm
);
15061 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15067 generate_exception_end(ctx
, EXCP_RI
);
15070 decode_micromips32_opc(env
, ctx
);
15077 /* SmartMIPS extension to MIPS32 */
15079 #if defined(TARGET_MIPS64)
15081 /* MDMX extension to MIPS64 */
15085 /* MIPSDSP functions. */
15086 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15087 int rd
, int base
, int offset
)
15092 t0
= tcg_temp_new();
15095 gen_load_gpr(t0
, offset
);
15096 } else if (offset
== 0) {
15097 gen_load_gpr(t0
, base
);
15099 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15104 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15105 gen_store_gpr(t0
, rd
);
15108 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15109 gen_store_gpr(t0
, rd
);
15112 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15113 gen_store_gpr(t0
, rd
);
15115 #if defined(TARGET_MIPS64)
15117 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15118 gen_store_gpr(t0
, rd
);
15125 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15126 int ret
, int v1
, int v2
)
15132 /* Treat as NOP. */
15136 v1_t
= tcg_temp_new();
15137 v2_t
= tcg_temp_new();
15139 gen_load_gpr(v1_t
, v1
);
15140 gen_load_gpr(v2_t
, v2
);
15143 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15144 case OPC_MULT_G_2E
:
15148 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15150 case OPC_ADDUH_R_QB
:
15151 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15154 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15156 case OPC_ADDQH_R_PH
:
15157 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15160 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15162 case OPC_ADDQH_R_W
:
15163 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15166 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15168 case OPC_SUBUH_R_QB
:
15169 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15172 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15174 case OPC_SUBQH_R_PH
:
15175 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15178 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15180 case OPC_SUBQH_R_W
:
15181 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15185 case OPC_ABSQ_S_PH_DSP
:
15187 case OPC_ABSQ_S_QB
:
15189 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15191 case OPC_ABSQ_S_PH
:
15193 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15197 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15199 case OPC_PRECEQ_W_PHL
:
15201 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15202 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15204 case OPC_PRECEQ_W_PHR
:
15206 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15207 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15208 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15210 case OPC_PRECEQU_PH_QBL
:
15212 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15214 case OPC_PRECEQU_PH_QBR
:
15216 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15218 case OPC_PRECEQU_PH_QBLA
:
15220 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15222 case OPC_PRECEQU_PH_QBRA
:
15224 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15226 case OPC_PRECEU_PH_QBL
:
15228 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15230 case OPC_PRECEU_PH_QBR
:
15232 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15234 case OPC_PRECEU_PH_QBLA
:
15236 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15238 case OPC_PRECEU_PH_QBRA
:
15240 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15244 case OPC_ADDU_QB_DSP
:
15248 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15250 case OPC_ADDQ_S_PH
:
15252 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15256 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15260 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15262 case OPC_ADDU_S_QB
:
15264 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15268 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15270 case OPC_ADDU_S_PH
:
15272 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15276 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15278 case OPC_SUBQ_S_PH
:
15280 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15284 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15288 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15290 case OPC_SUBU_S_QB
:
15292 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15296 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15298 case OPC_SUBU_S_PH
:
15300 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15304 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15308 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15312 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15314 case OPC_RADDU_W_QB
:
15316 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15320 case OPC_CMPU_EQ_QB_DSP
:
15322 case OPC_PRECR_QB_PH
:
15324 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15326 case OPC_PRECRQ_QB_PH
:
15328 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15330 case OPC_PRECR_SRA_PH_W
:
15333 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15334 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15336 tcg_temp_free_i32(sa_t
);
15339 case OPC_PRECR_SRA_R_PH_W
:
15342 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15343 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15345 tcg_temp_free_i32(sa_t
);
15348 case OPC_PRECRQ_PH_W
:
15350 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15352 case OPC_PRECRQ_RS_PH_W
:
15354 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 case OPC_PRECRQU_S_QB_PH
:
15358 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15362 #ifdef TARGET_MIPS64
15363 case OPC_ABSQ_S_QH_DSP
:
15365 case OPC_PRECEQ_L_PWL
:
15367 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15369 case OPC_PRECEQ_L_PWR
:
15371 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15373 case OPC_PRECEQ_PW_QHL
:
15375 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15377 case OPC_PRECEQ_PW_QHR
:
15379 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15381 case OPC_PRECEQ_PW_QHLA
:
15383 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15385 case OPC_PRECEQ_PW_QHRA
:
15387 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15389 case OPC_PRECEQU_QH_OBL
:
15391 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15393 case OPC_PRECEQU_QH_OBR
:
15395 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15397 case OPC_PRECEQU_QH_OBLA
:
15399 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15401 case OPC_PRECEQU_QH_OBRA
:
15403 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15405 case OPC_PRECEU_QH_OBL
:
15407 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15409 case OPC_PRECEU_QH_OBR
:
15411 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15413 case OPC_PRECEU_QH_OBLA
:
15415 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15417 case OPC_PRECEU_QH_OBRA
:
15419 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15421 case OPC_ABSQ_S_OB
:
15423 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15425 case OPC_ABSQ_S_PW
:
15427 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15429 case OPC_ABSQ_S_QH
:
15431 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15435 case OPC_ADDU_OB_DSP
:
15437 case OPC_RADDU_L_OB
:
15439 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15443 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15445 case OPC_SUBQ_S_PW
:
15447 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15451 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15453 case OPC_SUBQ_S_QH
:
15455 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15459 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15461 case OPC_SUBU_S_OB
:
15463 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15467 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15469 case OPC_SUBU_S_QH
:
15471 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15475 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15477 case OPC_SUBUH_R_OB
:
15479 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15483 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15485 case OPC_ADDQ_S_PW
:
15487 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15491 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15493 case OPC_ADDQ_S_QH
:
15495 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15499 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15501 case OPC_ADDU_S_OB
:
15503 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15507 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15509 case OPC_ADDU_S_QH
:
15511 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15515 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15517 case OPC_ADDUH_R_OB
:
15519 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15523 case OPC_CMPU_EQ_OB_DSP
:
15525 case OPC_PRECR_OB_QH
:
15527 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15529 case OPC_PRECR_SRA_QH_PW
:
15532 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15533 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15534 tcg_temp_free_i32(ret_t
);
15537 case OPC_PRECR_SRA_R_QH_PW
:
15540 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15541 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15542 tcg_temp_free_i32(sa_v
);
15545 case OPC_PRECRQ_OB_QH
:
15547 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15549 case OPC_PRECRQ_PW_L
:
15551 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15553 case OPC_PRECRQ_QH_PW
:
15555 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15557 case OPC_PRECRQ_RS_QH_PW
:
15559 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15561 case OPC_PRECRQU_S_OB_QH
:
15563 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15570 tcg_temp_free(v1_t
);
15571 tcg_temp_free(v2_t
);
15574 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15575 int ret
, int v1
, int v2
)
15583 /* Treat as NOP. */
15587 t0
= tcg_temp_new();
15588 v1_t
= tcg_temp_new();
15589 v2_t
= tcg_temp_new();
15591 tcg_gen_movi_tl(t0
, v1
);
15592 gen_load_gpr(v1_t
, v1
);
15593 gen_load_gpr(v2_t
, v2
);
15596 case OPC_SHLL_QB_DSP
:
15598 op2
= MASK_SHLL_QB(ctx
->opcode
);
15602 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15606 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15610 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15614 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15616 case OPC_SHLL_S_PH
:
15618 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15620 case OPC_SHLLV_S_PH
:
15622 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15626 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15628 case OPC_SHLLV_S_W
:
15630 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15634 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15638 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15642 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15646 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15650 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15652 case OPC_SHRA_R_QB
:
15654 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15658 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15660 case OPC_SHRAV_R_QB
:
15662 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15666 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15668 case OPC_SHRA_R_PH
:
15670 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15674 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15676 case OPC_SHRAV_R_PH
:
15678 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15682 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15684 case OPC_SHRAV_R_W
:
15686 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15688 default: /* Invalid */
15689 MIPS_INVAL("MASK SHLL.QB");
15690 generate_exception_end(ctx
, EXCP_RI
);
15695 #ifdef TARGET_MIPS64
15696 case OPC_SHLL_OB_DSP
:
15697 op2
= MASK_SHLL_OB(ctx
->opcode
);
15701 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15705 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15707 case OPC_SHLL_S_PW
:
15709 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15711 case OPC_SHLLV_S_PW
:
15713 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15717 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15721 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15725 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15729 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15731 case OPC_SHLL_S_QH
:
15733 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15735 case OPC_SHLLV_S_QH
:
15737 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15741 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15745 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15747 case OPC_SHRA_R_OB
:
15749 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15751 case OPC_SHRAV_R_OB
:
15753 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15757 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15761 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15763 case OPC_SHRA_R_PW
:
15765 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15767 case OPC_SHRAV_R_PW
:
15769 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15773 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15777 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15779 case OPC_SHRA_R_QH
:
15781 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15783 case OPC_SHRAV_R_QH
:
15785 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15789 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15793 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15797 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15801 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15803 default: /* Invalid */
15804 MIPS_INVAL("MASK SHLL.OB");
15805 generate_exception_end(ctx
, EXCP_RI
);
15813 tcg_temp_free(v1_t
);
15814 tcg_temp_free(v2_t
);
15817 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15818 int ret
, int v1
, int v2
, int check_ret
)
15824 if ((ret
== 0) && (check_ret
== 1)) {
15825 /* Treat as NOP. */
15829 t0
= tcg_temp_new_i32();
15830 v1_t
= tcg_temp_new();
15831 v2_t
= tcg_temp_new();
15833 tcg_gen_movi_i32(t0
, ret
);
15834 gen_load_gpr(v1_t
, v1
);
15835 gen_load_gpr(v2_t
, v2
);
15838 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15839 * the same mask and op1. */
15840 case OPC_MULT_G_2E
:
15844 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15847 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15850 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15852 case OPC_MULQ_RS_W
:
15853 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15857 case OPC_DPA_W_PH_DSP
:
15859 case OPC_DPAU_H_QBL
:
15861 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15863 case OPC_DPAU_H_QBR
:
15865 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15867 case OPC_DPSU_H_QBL
:
15869 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15871 case OPC_DPSU_H_QBR
:
15873 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15877 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15879 case OPC_DPAX_W_PH
:
15881 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15883 case OPC_DPAQ_S_W_PH
:
15885 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15887 case OPC_DPAQX_S_W_PH
:
15889 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15891 case OPC_DPAQX_SA_W_PH
:
15893 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15897 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15899 case OPC_DPSX_W_PH
:
15901 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15903 case OPC_DPSQ_S_W_PH
:
15905 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15907 case OPC_DPSQX_S_W_PH
:
15909 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15911 case OPC_DPSQX_SA_W_PH
:
15913 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15915 case OPC_MULSAQ_S_W_PH
:
15917 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15919 case OPC_DPAQ_SA_L_W
:
15921 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15923 case OPC_DPSQ_SA_L_W
:
15925 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15927 case OPC_MAQ_S_W_PHL
:
15929 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15931 case OPC_MAQ_S_W_PHR
:
15933 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15935 case OPC_MAQ_SA_W_PHL
:
15937 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15939 case OPC_MAQ_SA_W_PHR
:
15941 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15943 case OPC_MULSA_W_PH
:
15945 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15949 #ifdef TARGET_MIPS64
15950 case OPC_DPAQ_W_QH_DSP
:
15952 int ac
= ret
& 0x03;
15953 tcg_gen_movi_i32(t0
, ac
);
15958 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15962 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15966 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15970 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15974 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15976 case OPC_DPAQ_S_W_QH
:
15978 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15980 case OPC_DPAQ_SA_L_PW
:
15982 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15984 case OPC_DPAU_H_OBL
:
15986 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15988 case OPC_DPAU_H_OBR
:
15990 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15994 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15996 case OPC_DPSQ_S_W_QH
:
15998 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16000 case OPC_DPSQ_SA_L_PW
:
16002 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16004 case OPC_DPSU_H_OBL
:
16006 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16008 case OPC_DPSU_H_OBR
:
16010 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16012 case OPC_MAQ_S_L_PWL
:
16014 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16016 case OPC_MAQ_S_L_PWR
:
16018 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16020 case OPC_MAQ_S_W_QHLL
:
16022 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16024 case OPC_MAQ_SA_W_QHLL
:
16026 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16028 case OPC_MAQ_S_W_QHLR
:
16030 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16032 case OPC_MAQ_SA_W_QHLR
:
16034 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16036 case OPC_MAQ_S_W_QHRL
:
16038 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16040 case OPC_MAQ_SA_W_QHRL
:
16042 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16044 case OPC_MAQ_S_W_QHRR
:
16046 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16048 case OPC_MAQ_SA_W_QHRR
:
16050 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16052 case OPC_MULSAQ_S_L_PW
:
16054 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16056 case OPC_MULSAQ_S_W_QH
:
16058 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16064 case OPC_ADDU_QB_DSP
:
16066 case OPC_MULEU_S_PH_QBL
:
16068 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16070 case OPC_MULEU_S_PH_QBR
:
16072 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16074 case OPC_MULQ_RS_PH
:
16076 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16078 case OPC_MULEQ_S_W_PHL
:
16080 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16082 case OPC_MULEQ_S_W_PHR
:
16084 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16086 case OPC_MULQ_S_PH
:
16088 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16092 #ifdef TARGET_MIPS64
16093 case OPC_ADDU_OB_DSP
:
16095 case OPC_MULEQ_S_PW_QHL
:
16097 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16099 case OPC_MULEQ_S_PW_QHR
:
16101 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16103 case OPC_MULEU_S_QH_OBL
:
16105 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16107 case OPC_MULEU_S_QH_OBR
:
16109 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16111 case OPC_MULQ_RS_QH
:
16113 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16120 tcg_temp_free_i32(t0
);
16121 tcg_temp_free(v1_t
);
16122 tcg_temp_free(v2_t
);
16125 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16133 /* Treat as NOP. */
16137 t0
= tcg_temp_new();
16138 val_t
= tcg_temp_new();
16139 gen_load_gpr(val_t
, val
);
16142 case OPC_ABSQ_S_PH_DSP
:
16146 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16151 target_long result
;
16152 imm
= (ctx
->opcode
>> 16) & 0xFF;
16153 result
= (uint32_t)imm
<< 24 |
16154 (uint32_t)imm
<< 16 |
16155 (uint32_t)imm
<< 8 |
16157 result
= (int32_t)result
;
16158 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16163 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16164 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16165 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16166 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16167 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16168 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16173 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16174 imm
= (int16_t)(imm
<< 6) >> 6;
16175 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16176 (target_long
)((int32_t)imm
<< 16 | \
16182 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16183 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16184 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16185 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16189 #ifdef TARGET_MIPS64
16190 case OPC_ABSQ_S_QH_DSP
:
16197 imm
= (ctx
->opcode
>> 16) & 0xFF;
16198 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16199 temp
= (temp
<< 16) | temp
;
16200 temp
= (temp
<< 32) | temp
;
16201 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16209 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16210 imm
= (int16_t)(imm
<< 6) >> 6;
16211 temp
= ((target_long
)imm
<< 32) \
16212 | ((target_long
)imm
& 0xFFFFFFFF);
16213 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16221 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16222 imm
= (int16_t)(imm
<< 6) >> 6;
16224 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16225 ((uint64_t)(uint16_t)imm
<< 32) |
16226 ((uint64_t)(uint16_t)imm
<< 16) |
16227 (uint64_t)(uint16_t)imm
;
16228 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16233 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16234 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16235 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16236 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16237 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16238 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16239 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16243 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16244 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16245 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16249 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16250 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16251 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16252 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16253 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16260 tcg_temp_free(val_t
);
16263 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16264 uint32_t op1
, uint32_t op2
,
16265 int ret
, int v1
, int v2
, int check_ret
)
16271 if ((ret
== 0) && (check_ret
== 1)) {
16272 /* Treat as NOP. */
16276 t1
= tcg_temp_new();
16277 v1_t
= tcg_temp_new();
16278 v2_t
= tcg_temp_new();
16280 gen_load_gpr(v1_t
, v1
);
16281 gen_load_gpr(v2_t
, v2
);
16284 case OPC_CMPU_EQ_QB_DSP
:
16286 case OPC_CMPU_EQ_QB
:
16288 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16290 case OPC_CMPU_LT_QB
:
16292 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16294 case OPC_CMPU_LE_QB
:
16296 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16298 case OPC_CMPGU_EQ_QB
:
16300 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16302 case OPC_CMPGU_LT_QB
:
16304 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16306 case OPC_CMPGU_LE_QB
:
16308 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16310 case OPC_CMPGDU_EQ_QB
:
16312 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16313 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16314 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16315 tcg_gen_shli_tl(t1
, t1
, 24);
16316 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16318 case OPC_CMPGDU_LT_QB
:
16320 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16321 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16322 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16323 tcg_gen_shli_tl(t1
, t1
, 24);
16324 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16326 case OPC_CMPGDU_LE_QB
:
16328 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16329 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16330 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16331 tcg_gen_shli_tl(t1
, t1
, 24);
16332 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16334 case OPC_CMP_EQ_PH
:
16336 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16338 case OPC_CMP_LT_PH
:
16340 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16342 case OPC_CMP_LE_PH
:
16344 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16348 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16352 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16354 case OPC_PACKRL_PH
:
16356 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16360 #ifdef TARGET_MIPS64
16361 case OPC_CMPU_EQ_OB_DSP
:
16363 case OPC_CMP_EQ_PW
:
16365 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16367 case OPC_CMP_LT_PW
:
16369 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16371 case OPC_CMP_LE_PW
:
16373 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16375 case OPC_CMP_EQ_QH
:
16377 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16379 case OPC_CMP_LT_QH
:
16381 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16383 case OPC_CMP_LE_QH
:
16385 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16387 case OPC_CMPGDU_EQ_OB
:
16389 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16391 case OPC_CMPGDU_LT_OB
:
16393 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16395 case OPC_CMPGDU_LE_OB
:
16397 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16399 case OPC_CMPGU_EQ_OB
:
16401 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16403 case OPC_CMPGU_LT_OB
:
16405 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16407 case OPC_CMPGU_LE_OB
:
16409 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16411 case OPC_CMPU_EQ_OB
:
16413 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16415 case OPC_CMPU_LT_OB
:
16417 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16419 case OPC_CMPU_LE_OB
:
16421 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16423 case OPC_PACKRL_PW
:
16425 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16429 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16433 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16437 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16445 tcg_temp_free(v1_t
);
16446 tcg_temp_free(v2_t
);
16449 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16450 uint32_t op1
, int rt
, int rs
, int sa
)
16457 /* Treat as NOP. */
16461 t0
= tcg_temp_new();
16462 gen_load_gpr(t0
, rs
);
16465 case OPC_APPEND_DSP
:
16466 switch (MASK_APPEND(ctx
->opcode
)) {
16469 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16471 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16475 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16476 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16477 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16478 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16480 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16484 if (sa
!= 0 && sa
!= 2) {
16485 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16486 tcg_gen_ext32u_tl(t0
, t0
);
16487 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16488 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16490 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16492 default: /* Invalid */
16493 MIPS_INVAL("MASK APPEND");
16494 generate_exception_end(ctx
, EXCP_RI
);
16498 #ifdef TARGET_MIPS64
16499 case OPC_DAPPEND_DSP
:
16500 switch (MASK_DAPPEND(ctx
->opcode
)) {
16503 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16507 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16508 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16509 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16513 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16514 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16515 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16520 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16521 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16522 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16523 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16526 default: /* Invalid */
16527 MIPS_INVAL("MASK DAPPEND");
16528 generate_exception_end(ctx
, EXCP_RI
);
16537 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16538 int ret
, int v1
, int v2
, int check_ret
)
16547 if ((ret
== 0) && (check_ret
== 1)) {
16548 /* Treat as NOP. */
16552 t0
= tcg_temp_new();
16553 t1
= tcg_temp_new();
16554 v1_t
= tcg_temp_new();
16555 v2_t
= tcg_temp_new();
16557 gen_load_gpr(v1_t
, v1
);
16558 gen_load_gpr(v2_t
, v2
);
16561 case OPC_EXTR_W_DSP
:
16565 tcg_gen_movi_tl(t0
, v2
);
16566 tcg_gen_movi_tl(t1
, v1
);
16567 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16570 tcg_gen_movi_tl(t0
, v2
);
16571 tcg_gen_movi_tl(t1
, v1
);
16572 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16574 case OPC_EXTR_RS_W
:
16575 tcg_gen_movi_tl(t0
, v2
);
16576 tcg_gen_movi_tl(t1
, v1
);
16577 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16580 tcg_gen_movi_tl(t0
, v2
);
16581 tcg_gen_movi_tl(t1
, v1
);
16582 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16584 case OPC_EXTRV_S_H
:
16585 tcg_gen_movi_tl(t0
, v2
);
16586 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16589 tcg_gen_movi_tl(t0
, v2
);
16590 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16592 case OPC_EXTRV_R_W
:
16593 tcg_gen_movi_tl(t0
, v2
);
16594 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16596 case OPC_EXTRV_RS_W
:
16597 tcg_gen_movi_tl(t0
, v2
);
16598 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16601 tcg_gen_movi_tl(t0
, v2
);
16602 tcg_gen_movi_tl(t1
, v1
);
16603 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16606 tcg_gen_movi_tl(t0
, v2
);
16607 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16610 tcg_gen_movi_tl(t0
, v2
);
16611 tcg_gen_movi_tl(t1
, v1
);
16612 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16615 tcg_gen_movi_tl(t0
, v2
);
16616 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16619 imm
= (ctx
->opcode
>> 20) & 0x3F;
16620 tcg_gen_movi_tl(t0
, ret
);
16621 tcg_gen_movi_tl(t1
, imm
);
16622 gen_helper_shilo(t0
, t1
, cpu_env
);
16625 tcg_gen_movi_tl(t0
, ret
);
16626 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16629 tcg_gen_movi_tl(t0
, ret
);
16630 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16633 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16634 tcg_gen_movi_tl(t0
, imm
);
16635 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16638 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16639 tcg_gen_movi_tl(t0
, imm
);
16640 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16644 #ifdef TARGET_MIPS64
16645 case OPC_DEXTR_W_DSP
:
16649 tcg_gen_movi_tl(t0
, ret
);
16650 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16654 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16655 int ac
= (ctx
->opcode
>> 11) & 0x03;
16656 tcg_gen_movi_tl(t0
, shift
);
16657 tcg_gen_movi_tl(t1
, ac
);
16658 gen_helper_dshilo(t0
, t1
, cpu_env
);
16663 int ac
= (ctx
->opcode
>> 11) & 0x03;
16664 tcg_gen_movi_tl(t0
, ac
);
16665 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16669 tcg_gen_movi_tl(t0
, v2
);
16670 tcg_gen_movi_tl(t1
, v1
);
16672 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16675 tcg_gen_movi_tl(t0
, v2
);
16676 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16679 tcg_gen_movi_tl(t0
, v2
);
16680 tcg_gen_movi_tl(t1
, v1
);
16681 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16684 tcg_gen_movi_tl(t0
, v2
);
16685 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16688 tcg_gen_movi_tl(t0
, v2
);
16689 tcg_gen_movi_tl(t1
, v1
);
16690 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16692 case OPC_DEXTR_R_L
:
16693 tcg_gen_movi_tl(t0
, v2
);
16694 tcg_gen_movi_tl(t1
, v1
);
16695 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16697 case OPC_DEXTR_RS_L
:
16698 tcg_gen_movi_tl(t0
, v2
);
16699 tcg_gen_movi_tl(t1
, v1
);
16700 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16703 tcg_gen_movi_tl(t0
, v2
);
16704 tcg_gen_movi_tl(t1
, v1
);
16705 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16707 case OPC_DEXTR_R_W
:
16708 tcg_gen_movi_tl(t0
, v2
);
16709 tcg_gen_movi_tl(t1
, v1
);
16710 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16712 case OPC_DEXTR_RS_W
:
16713 tcg_gen_movi_tl(t0
, v2
);
16714 tcg_gen_movi_tl(t1
, v1
);
16715 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16717 case OPC_DEXTR_S_H
:
16718 tcg_gen_movi_tl(t0
, v2
);
16719 tcg_gen_movi_tl(t1
, v1
);
16720 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16722 case OPC_DEXTRV_S_H
:
16723 tcg_gen_movi_tl(t0
, v2
);
16724 tcg_gen_movi_tl(t1
, v1
);
16725 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16728 tcg_gen_movi_tl(t0
, v2
);
16729 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16731 case OPC_DEXTRV_R_L
:
16732 tcg_gen_movi_tl(t0
, v2
);
16733 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16735 case OPC_DEXTRV_RS_L
:
16736 tcg_gen_movi_tl(t0
, v2
);
16737 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16740 tcg_gen_movi_tl(t0
, v2
);
16741 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16743 case OPC_DEXTRV_R_W
:
16744 tcg_gen_movi_tl(t0
, v2
);
16745 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16747 case OPC_DEXTRV_RS_W
:
16748 tcg_gen_movi_tl(t0
, v2
);
16749 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16758 tcg_temp_free(v1_t
);
16759 tcg_temp_free(v2_t
);
16762 /* End MIPSDSP functions. */
16764 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16766 int rs
, rt
, rd
, sa
;
16769 rs
= (ctx
->opcode
>> 21) & 0x1f;
16770 rt
= (ctx
->opcode
>> 16) & 0x1f;
16771 rd
= (ctx
->opcode
>> 11) & 0x1f;
16772 sa
= (ctx
->opcode
>> 6) & 0x1f;
16774 op1
= MASK_SPECIAL(ctx
->opcode
);
16777 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16779 case OPC_MULT
... OPC_DIVU
:
16780 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16790 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16793 MIPS_INVAL("special_r6 muldiv");
16794 generate_exception_end(ctx
, EXCP_RI
);
16800 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16804 if (rt
== 0 && sa
== 1) {
16805 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16806 We need additionally to check other fields */
16807 gen_cl(ctx
, op1
, rd
, rs
);
16809 generate_exception_end(ctx
, EXCP_RI
);
16813 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16814 gen_helper_do_semihosting(cpu_env
);
16816 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16817 generate_exception_end(ctx
, EXCP_RI
);
16819 generate_exception_end(ctx
, EXCP_DBp
);
16823 #if defined(TARGET_MIPS64)
16825 check_mips_64(ctx
);
16826 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16830 if (rt
== 0 && sa
== 1) {
16831 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16832 We need additionally to check other fields */
16833 check_mips_64(ctx
);
16834 gen_cl(ctx
, op1
, rd
, rs
);
16836 generate_exception_end(ctx
, EXCP_RI
);
16839 case OPC_DMULT
... OPC_DDIVU
:
16840 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16850 check_mips_64(ctx
);
16851 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16854 MIPS_INVAL("special_r6 muldiv");
16855 generate_exception_end(ctx
, EXCP_RI
);
16860 default: /* Invalid */
16861 MIPS_INVAL("special_r6");
16862 generate_exception_end(ctx
, EXCP_RI
);
16867 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16869 int rs
, rt
, rd
, sa
;
16872 rs
= (ctx
->opcode
>> 21) & 0x1f;
16873 rt
= (ctx
->opcode
>> 16) & 0x1f;
16874 rd
= (ctx
->opcode
>> 11) & 0x1f;
16875 sa
= (ctx
->opcode
>> 6) & 0x1f;
16877 op1
= MASK_SPECIAL(ctx
->opcode
);
16879 case OPC_MOVN
: /* Conditional move */
16881 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16882 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16883 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16885 case OPC_MFHI
: /* Move from HI/LO */
16887 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16890 case OPC_MTLO
: /* Move to HI/LO */
16891 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16894 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16895 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16896 check_cp1_enabled(ctx
);
16897 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16898 (ctx
->opcode
>> 16) & 1);
16900 generate_exception_err(ctx
, EXCP_CpU
, 1);
16906 check_insn(ctx
, INSN_VR54XX
);
16907 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16908 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16910 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16915 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16917 #if defined(TARGET_MIPS64)
16918 case OPC_DMULT
... OPC_DDIVU
:
16919 check_insn(ctx
, ISA_MIPS3
);
16920 check_mips_64(ctx
);
16921 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16925 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16928 #ifdef MIPS_STRICT_STANDARD
16929 MIPS_INVAL("SPIM");
16930 generate_exception_end(ctx
, EXCP_RI
);
16932 /* Implemented as RI exception for now. */
16933 MIPS_INVAL("spim (unofficial)");
16934 generate_exception_end(ctx
, EXCP_RI
);
16937 default: /* Invalid */
16938 MIPS_INVAL("special_legacy");
16939 generate_exception_end(ctx
, EXCP_RI
);
16944 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16946 int rs
, rt
, rd
, sa
;
16949 rs
= (ctx
->opcode
>> 21) & 0x1f;
16950 rt
= (ctx
->opcode
>> 16) & 0x1f;
16951 rd
= (ctx
->opcode
>> 11) & 0x1f;
16952 sa
= (ctx
->opcode
>> 6) & 0x1f;
16954 op1
= MASK_SPECIAL(ctx
->opcode
);
16956 case OPC_SLL
: /* Shift with immediate */
16957 if (sa
== 5 && rd
== 0 &&
16958 rs
== 0 && rt
== 0) { /* PAUSE */
16959 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16960 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16961 generate_exception_end(ctx
, EXCP_RI
);
16967 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16970 switch ((ctx
->opcode
>> 21) & 0x1f) {
16972 /* rotr is decoded as srl on non-R2 CPUs */
16973 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16978 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16981 generate_exception_end(ctx
, EXCP_RI
);
16985 case OPC_ADD
... OPC_SUBU
:
16986 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16988 case OPC_SLLV
: /* Shifts */
16990 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16993 switch ((ctx
->opcode
>> 6) & 0x1f) {
16995 /* rotrv is decoded as srlv on non-R2 CPUs */
16996 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17001 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17004 generate_exception_end(ctx
, EXCP_RI
);
17008 case OPC_SLT
: /* Set on less than */
17010 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17012 case OPC_AND
: /* Logic*/
17016 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17019 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17021 case OPC_TGE
... OPC_TEQ
: /* Traps */
17023 check_insn(ctx
, ISA_MIPS2
);
17024 gen_trap(ctx
, op1
, rs
, rt
, -1);
17026 case OPC_LSA
: /* OPC_PMON */
17027 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17028 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17029 decode_opc_special_r6(env
, ctx
);
17031 /* Pmon entry point, also R4010 selsl */
17032 #ifdef MIPS_STRICT_STANDARD
17033 MIPS_INVAL("PMON / selsl");
17034 generate_exception_end(ctx
, EXCP_RI
);
17036 gen_helper_0e0i(pmon
, sa
);
17041 generate_exception_end(ctx
, EXCP_SYSCALL
);
17044 generate_exception_end(ctx
, EXCP_BREAK
);
17047 check_insn(ctx
, ISA_MIPS2
);
17048 /* Treat as NOP. */
17051 #if defined(TARGET_MIPS64)
17052 /* MIPS64 specific opcodes */
17057 check_insn(ctx
, ISA_MIPS3
);
17058 check_mips_64(ctx
);
17059 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17062 switch ((ctx
->opcode
>> 21) & 0x1f) {
17064 /* drotr is decoded as dsrl on non-R2 CPUs */
17065 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17070 check_insn(ctx
, ISA_MIPS3
);
17071 check_mips_64(ctx
);
17072 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17075 generate_exception_end(ctx
, EXCP_RI
);
17080 switch ((ctx
->opcode
>> 21) & 0x1f) {
17082 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17083 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17088 check_insn(ctx
, ISA_MIPS3
);
17089 check_mips_64(ctx
);
17090 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17093 generate_exception_end(ctx
, EXCP_RI
);
17097 case OPC_DADD
... OPC_DSUBU
:
17098 check_insn(ctx
, ISA_MIPS3
);
17099 check_mips_64(ctx
);
17100 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17104 check_insn(ctx
, ISA_MIPS3
);
17105 check_mips_64(ctx
);
17106 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17109 switch ((ctx
->opcode
>> 6) & 0x1f) {
17111 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17112 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17117 check_insn(ctx
, ISA_MIPS3
);
17118 check_mips_64(ctx
);
17119 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17122 generate_exception_end(ctx
, EXCP_RI
);
17127 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17128 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17129 decode_opc_special_r6(env
, ctx
);
17134 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17135 decode_opc_special_r6(env
, ctx
);
17137 decode_opc_special_legacy(env
, ctx
);
17142 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17147 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17149 rs
= (ctx
->opcode
>> 21) & 0x1f;
17150 rt
= (ctx
->opcode
>> 16) & 0x1f;
17151 rd
= (ctx
->opcode
>> 11) & 0x1f;
17153 op1
= MASK_SPECIAL2(ctx
->opcode
);
17155 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17156 case OPC_MSUB
... OPC_MSUBU
:
17157 check_insn(ctx
, ISA_MIPS32
);
17158 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17161 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17164 case OPC_DIVU_G_2F
:
17165 case OPC_MULT_G_2F
:
17166 case OPC_MULTU_G_2F
:
17168 case OPC_MODU_G_2F
:
17169 check_insn(ctx
, INSN_LOONGSON2F
);
17170 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17174 check_insn(ctx
, ISA_MIPS32
);
17175 gen_cl(ctx
, op1
, rd
, rs
);
17178 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17179 gen_helper_do_semihosting(cpu_env
);
17181 /* XXX: not clear which exception should be raised
17182 * when in debug mode...
17184 check_insn(ctx
, ISA_MIPS32
);
17185 generate_exception_end(ctx
, EXCP_DBp
);
17188 #if defined(TARGET_MIPS64)
17191 check_insn(ctx
, ISA_MIPS64
);
17192 check_mips_64(ctx
);
17193 gen_cl(ctx
, op1
, rd
, rs
);
17195 case OPC_DMULT_G_2F
:
17196 case OPC_DMULTU_G_2F
:
17197 case OPC_DDIV_G_2F
:
17198 case OPC_DDIVU_G_2F
:
17199 case OPC_DMOD_G_2F
:
17200 case OPC_DMODU_G_2F
:
17201 check_insn(ctx
, INSN_LOONGSON2F
);
17202 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17205 default: /* Invalid */
17206 MIPS_INVAL("special2_legacy");
17207 generate_exception_end(ctx
, EXCP_RI
);
17212 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17214 int rs
, rt
, rd
, sa
;
17218 rs
= (ctx
->opcode
>> 21) & 0x1f;
17219 rt
= (ctx
->opcode
>> 16) & 0x1f;
17220 rd
= (ctx
->opcode
>> 11) & 0x1f;
17221 sa
= (ctx
->opcode
>> 6) & 0x1f;
17222 imm
= (int16_t)ctx
->opcode
>> 7;
17224 op1
= MASK_SPECIAL3(ctx
->opcode
);
17228 /* hint codes 24-31 are reserved and signal RI */
17229 generate_exception_end(ctx
, EXCP_RI
);
17231 /* Treat as NOP. */
17234 check_cp0_enabled(ctx
);
17235 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17236 gen_cache_operation(ctx
, rt
, rs
, imm
);
17240 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17243 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17248 /* Treat as NOP. */
17251 op2
= MASK_BSHFL(ctx
->opcode
);
17253 case OPC_ALIGN
... OPC_ALIGN_END
:
17254 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17257 gen_bitswap(ctx
, op2
, rd
, rt
);
17262 #if defined(TARGET_MIPS64)
17264 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17267 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17270 check_mips_64(ctx
);
17273 /* Treat as NOP. */
17276 op2
= MASK_DBSHFL(ctx
->opcode
);
17278 case OPC_DALIGN
... OPC_DALIGN_END
:
17279 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17282 gen_bitswap(ctx
, op2
, rd
, rt
);
17289 default: /* Invalid */
17290 MIPS_INVAL("special3_r6");
17291 generate_exception_end(ctx
, EXCP_RI
);
17296 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17301 rs
= (ctx
->opcode
>> 21) & 0x1f;
17302 rt
= (ctx
->opcode
>> 16) & 0x1f;
17303 rd
= (ctx
->opcode
>> 11) & 0x1f;
17305 op1
= MASK_SPECIAL3(ctx
->opcode
);
17307 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17308 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17309 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17310 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17311 * the same mask and op1. */
17312 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17313 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17316 case OPC_ADDUH_R_QB
:
17318 case OPC_ADDQH_R_PH
:
17320 case OPC_ADDQH_R_W
:
17322 case OPC_SUBUH_R_QB
:
17324 case OPC_SUBQH_R_PH
:
17326 case OPC_SUBQH_R_W
:
17327 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17332 case OPC_MULQ_RS_W
:
17333 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17336 MIPS_INVAL("MASK ADDUH.QB");
17337 generate_exception_end(ctx
, EXCP_RI
);
17340 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17341 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17343 generate_exception_end(ctx
, EXCP_RI
);
17347 op2
= MASK_LX(ctx
->opcode
);
17349 #if defined(TARGET_MIPS64)
17355 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17357 default: /* Invalid */
17358 MIPS_INVAL("MASK LX");
17359 generate_exception_end(ctx
, EXCP_RI
);
17363 case OPC_ABSQ_S_PH_DSP
:
17364 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17366 case OPC_ABSQ_S_QB
:
17367 case OPC_ABSQ_S_PH
:
17369 case OPC_PRECEQ_W_PHL
:
17370 case OPC_PRECEQ_W_PHR
:
17371 case OPC_PRECEQU_PH_QBL
:
17372 case OPC_PRECEQU_PH_QBR
:
17373 case OPC_PRECEQU_PH_QBLA
:
17374 case OPC_PRECEQU_PH_QBRA
:
17375 case OPC_PRECEU_PH_QBL
:
17376 case OPC_PRECEU_PH_QBR
:
17377 case OPC_PRECEU_PH_QBLA
:
17378 case OPC_PRECEU_PH_QBRA
:
17379 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17386 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17389 MIPS_INVAL("MASK ABSQ_S.PH");
17390 generate_exception_end(ctx
, EXCP_RI
);
17394 case OPC_ADDU_QB_DSP
:
17395 op2
= MASK_ADDU_QB(ctx
->opcode
);
17398 case OPC_ADDQ_S_PH
:
17401 case OPC_ADDU_S_QB
:
17403 case OPC_ADDU_S_PH
:
17405 case OPC_SUBQ_S_PH
:
17408 case OPC_SUBU_S_QB
:
17410 case OPC_SUBU_S_PH
:
17414 case OPC_RADDU_W_QB
:
17415 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17417 case OPC_MULEU_S_PH_QBL
:
17418 case OPC_MULEU_S_PH_QBR
:
17419 case OPC_MULQ_RS_PH
:
17420 case OPC_MULEQ_S_W_PHL
:
17421 case OPC_MULEQ_S_W_PHR
:
17422 case OPC_MULQ_S_PH
:
17423 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17425 default: /* Invalid */
17426 MIPS_INVAL("MASK ADDU.QB");
17427 generate_exception_end(ctx
, EXCP_RI
);
17432 case OPC_CMPU_EQ_QB_DSP
:
17433 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17435 case OPC_PRECR_SRA_PH_W
:
17436 case OPC_PRECR_SRA_R_PH_W
:
17437 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17439 case OPC_PRECR_QB_PH
:
17440 case OPC_PRECRQ_QB_PH
:
17441 case OPC_PRECRQ_PH_W
:
17442 case OPC_PRECRQ_RS_PH_W
:
17443 case OPC_PRECRQU_S_QB_PH
:
17444 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17446 case OPC_CMPU_EQ_QB
:
17447 case OPC_CMPU_LT_QB
:
17448 case OPC_CMPU_LE_QB
:
17449 case OPC_CMP_EQ_PH
:
17450 case OPC_CMP_LT_PH
:
17451 case OPC_CMP_LE_PH
:
17452 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17454 case OPC_CMPGU_EQ_QB
:
17455 case OPC_CMPGU_LT_QB
:
17456 case OPC_CMPGU_LE_QB
:
17457 case OPC_CMPGDU_EQ_QB
:
17458 case OPC_CMPGDU_LT_QB
:
17459 case OPC_CMPGDU_LE_QB
:
17462 case OPC_PACKRL_PH
:
17463 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17465 default: /* Invalid */
17466 MIPS_INVAL("MASK CMPU.EQ.QB");
17467 generate_exception_end(ctx
, EXCP_RI
);
17471 case OPC_SHLL_QB_DSP
:
17472 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17474 case OPC_DPA_W_PH_DSP
:
17475 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17477 case OPC_DPAU_H_QBL
:
17478 case OPC_DPAU_H_QBR
:
17479 case OPC_DPSU_H_QBL
:
17480 case OPC_DPSU_H_QBR
:
17482 case OPC_DPAX_W_PH
:
17483 case OPC_DPAQ_S_W_PH
:
17484 case OPC_DPAQX_S_W_PH
:
17485 case OPC_DPAQX_SA_W_PH
:
17487 case OPC_DPSX_W_PH
:
17488 case OPC_DPSQ_S_W_PH
:
17489 case OPC_DPSQX_S_W_PH
:
17490 case OPC_DPSQX_SA_W_PH
:
17491 case OPC_MULSAQ_S_W_PH
:
17492 case OPC_DPAQ_SA_L_W
:
17493 case OPC_DPSQ_SA_L_W
:
17494 case OPC_MAQ_S_W_PHL
:
17495 case OPC_MAQ_S_W_PHR
:
17496 case OPC_MAQ_SA_W_PHL
:
17497 case OPC_MAQ_SA_W_PHR
:
17498 case OPC_MULSA_W_PH
:
17499 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17501 default: /* Invalid */
17502 MIPS_INVAL("MASK DPAW.PH");
17503 generate_exception_end(ctx
, EXCP_RI
);
17508 op2
= MASK_INSV(ctx
->opcode
);
17519 t0
= tcg_temp_new();
17520 t1
= tcg_temp_new();
17522 gen_load_gpr(t0
, rt
);
17523 gen_load_gpr(t1
, rs
);
17525 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17531 default: /* Invalid */
17532 MIPS_INVAL("MASK INSV");
17533 generate_exception_end(ctx
, EXCP_RI
);
17537 case OPC_APPEND_DSP
:
17538 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17540 case OPC_EXTR_W_DSP
:
17541 op2
= MASK_EXTR_W(ctx
->opcode
);
17545 case OPC_EXTR_RS_W
:
17547 case OPC_EXTRV_S_H
:
17549 case OPC_EXTRV_R_W
:
17550 case OPC_EXTRV_RS_W
:
17555 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17558 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17564 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17566 default: /* Invalid */
17567 MIPS_INVAL("MASK EXTR.W");
17568 generate_exception_end(ctx
, EXCP_RI
);
17572 #if defined(TARGET_MIPS64)
17573 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17574 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17575 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17576 check_insn(ctx
, INSN_LOONGSON2E
);
17577 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17579 case OPC_ABSQ_S_QH_DSP
:
17580 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17582 case OPC_PRECEQ_L_PWL
:
17583 case OPC_PRECEQ_L_PWR
:
17584 case OPC_PRECEQ_PW_QHL
:
17585 case OPC_PRECEQ_PW_QHR
:
17586 case OPC_PRECEQ_PW_QHLA
:
17587 case OPC_PRECEQ_PW_QHRA
:
17588 case OPC_PRECEQU_QH_OBL
:
17589 case OPC_PRECEQU_QH_OBR
:
17590 case OPC_PRECEQU_QH_OBLA
:
17591 case OPC_PRECEQU_QH_OBRA
:
17592 case OPC_PRECEU_QH_OBL
:
17593 case OPC_PRECEU_QH_OBR
:
17594 case OPC_PRECEU_QH_OBLA
:
17595 case OPC_PRECEU_QH_OBRA
:
17596 case OPC_ABSQ_S_OB
:
17597 case OPC_ABSQ_S_PW
:
17598 case OPC_ABSQ_S_QH
:
17599 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17607 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17609 default: /* Invalid */
17610 MIPS_INVAL("MASK ABSQ_S.QH");
17611 generate_exception_end(ctx
, EXCP_RI
);
17615 case OPC_ADDU_OB_DSP
:
17616 op2
= MASK_ADDU_OB(ctx
->opcode
);
17618 case OPC_RADDU_L_OB
:
17620 case OPC_SUBQ_S_PW
:
17622 case OPC_SUBQ_S_QH
:
17624 case OPC_SUBU_S_OB
:
17626 case OPC_SUBU_S_QH
:
17628 case OPC_SUBUH_R_OB
:
17630 case OPC_ADDQ_S_PW
:
17632 case OPC_ADDQ_S_QH
:
17634 case OPC_ADDU_S_OB
:
17636 case OPC_ADDU_S_QH
:
17638 case OPC_ADDUH_R_OB
:
17639 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17641 case OPC_MULEQ_S_PW_QHL
:
17642 case OPC_MULEQ_S_PW_QHR
:
17643 case OPC_MULEU_S_QH_OBL
:
17644 case OPC_MULEU_S_QH_OBR
:
17645 case OPC_MULQ_RS_QH
:
17646 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17648 default: /* Invalid */
17649 MIPS_INVAL("MASK ADDU.OB");
17650 generate_exception_end(ctx
, EXCP_RI
);
17654 case OPC_CMPU_EQ_OB_DSP
:
17655 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17657 case OPC_PRECR_SRA_QH_PW
:
17658 case OPC_PRECR_SRA_R_QH_PW
:
17659 /* Return value is rt. */
17660 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17662 case OPC_PRECR_OB_QH
:
17663 case OPC_PRECRQ_OB_QH
:
17664 case OPC_PRECRQ_PW_L
:
17665 case OPC_PRECRQ_QH_PW
:
17666 case OPC_PRECRQ_RS_QH_PW
:
17667 case OPC_PRECRQU_S_OB_QH
:
17668 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17670 case OPC_CMPU_EQ_OB
:
17671 case OPC_CMPU_LT_OB
:
17672 case OPC_CMPU_LE_OB
:
17673 case OPC_CMP_EQ_QH
:
17674 case OPC_CMP_LT_QH
:
17675 case OPC_CMP_LE_QH
:
17676 case OPC_CMP_EQ_PW
:
17677 case OPC_CMP_LT_PW
:
17678 case OPC_CMP_LE_PW
:
17679 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17681 case OPC_CMPGDU_EQ_OB
:
17682 case OPC_CMPGDU_LT_OB
:
17683 case OPC_CMPGDU_LE_OB
:
17684 case OPC_CMPGU_EQ_OB
:
17685 case OPC_CMPGU_LT_OB
:
17686 case OPC_CMPGU_LE_OB
:
17687 case OPC_PACKRL_PW
:
17691 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17693 default: /* Invalid */
17694 MIPS_INVAL("MASK CMPU_EQ.OB");
17695 generate_exception_end(ctx
, EXCP_RI
);
17699 case OPC_DAPPEND_DSP
:
17700 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17702 case OPC_DEXTR_W_DSP
:
17703 op2
= MASK_DEXTR_W(ctx
->opcode
);
17710 case OPC_DEXTR_R_L
:
17711 case OPC_DEXTR_RS_L
:
17713 case OPC_DEXTR_R_W
:
17714 case OPC_DEXTR_RS_W
:
17715 case OPC_DEXTR_S_H
:
17717 case OPC_DEXTRV_R_L
:
17718 case OPC_DEXTRV_RS_L
:
17719 case OPC_DEXTRV_S_H
:
17721 case OPC_DEXTRV_R_W
:
17722 case OPC_DEXTRV_RS_W
:
17723 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17728 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17730 default: /* Invalid */
17731 MIPS_INVAL("MASK EXTR.W");
17732 generate_exception_end(ctx
, EXCP_RI
);
17736 case OPC_DPAQ_W_QH_DSP
:
17737 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17739 case OPC_DPAU_H_OBL
:
17740 case OPC_DPAU_H_OBR
:
17741 case OPC_DPSU_H_OBL
:
17742 case OPC_DPSU_H_OBR
:
17744 case OPC_DPAQ_S_W_QH
:
17746 case OPC_DPSQ_S_W_QH
:
17747 case OPC_MULSAQ_S_W_QH
:
17748 case OPC_DPAQ_SA_L_PW
:
17749 case OPC_DPSQ_SA_L_PW
:
17750 case OPC_MULSAQ_S_L_PW
:
17751 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17753 case OPC_MAQ_S_W_QHLL
:
17754 case OPC_MAQ_S_W_QHLR
:
17755 case OPC_MAQ_S_W_QHRL
:
17756 case OPC_MAQ_S_W_QHRR
:
17757 case OPC_MAQ_SA_W_QHLL
:
17758 case OPC_MAQ_SA_W_QHLR
:
17759 case OPC_MAQ_SA_W_QHRL
:
17760 case OPC_MAQ_SA_W_QHRR
:
17761 case OPC_MAQ_S_L_PWL
:
17762 case OPC_MAQ_S_L_PWR
:
17767 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17769 default: /* Invalid */
17770 MIPS_INVAL("MASK DPAQ.W.QH");
17771 generate_exception_end(ctx
, EXCP_RI
);
17775 case OPC_DINSV_DSP
:
17776 op2
= MASK_INSV(ctx
->opcode
);
17787 t0
= tcg_temp_new();
17788 t1
= tcg_temp_new();
17790 gen_load_gpr(t0
, rt
);
17791 gen_load_gpr(t1
, rs
);
17793 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17799 default: /* Invalid */
17800 MIPS_INVAL("MASK DINSV");
17801 generate_exception_end(ctx
, EXCP_RI
);
17805 case OPC_SHLL_OB_DSP
:
17806 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17809 default: /* Invalid */
17810 MIPS_INVAL("special3_legacy");
17811 generate_exception_end(ctx
, EXCP_RI
);
17816 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17818 int rs
, rt
, rd
, sa
;
17821 rs
= (ctx
->opcode
>> 21) & 0x1f;
17822 rt
= (ctx
->opcode
>> 16) & 0x1f;
17823 rd
= (ctx
->opcode
>> 11) & 0x1f;
17824 sa
= (ctx
->opcode
>> 6) & 0x1f;
17826 op1
= MASK_SPECIAL3(ctx
->opcode
);
17830 check_insn(ctx
, ISA_MIPS32R2
);
17831 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17834 op2
= MASK_BSHFL(ctx
->opcode
);
17836 case OPC_ALIGN
... OPC_ALIGN_END
:
17838 check_insn(ctx
, ISA_MIPS32R6
);
17839 decode_opc_special3_r6(env
, ctx
);
17842 check_insn(ctx
, ISA_MIPS32R2
);
17843 gen_bshfl(ctx
, op2
, rt
, rd
);
17847 #if defined(TARGET_MIPS64)
17848 case OPC_DEXTM
... OPC_DEXT
:
17849 case OPC_DINSM
... OPC_DINS
:
17850 check_insn(ctx
, ISA_MIPS64R2
);
17851 check_mips_64(ctx
);
17852 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17855 op2
= MASK_DBSHFL(ctx
->opcode
);
17857 case OPC_DALIGN
... OPC_DALIGN_END
:
17859 check_insn(ctx
, ISA_MIPS32R6
);
17860 decode_opc_special3_r6(env
, ctx
);
17863 check_insn(ctx
, ISA_MIPS64R2
);
17864 check_mips_64(ctx
);
17865 op2
= MASK_DBSHFL(ctx
->opcode
);
17866 gen_bshfl(ctx
, op2
, rt
, rd
);
17872 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17875 check_insn(ctx
, ASE_MT
);
17877 TCGv t0
= tcg_temp_new();
17878 TCGv t1
= tcg_temp_new();
17880 gen_load_gpr(t0
, rt
);
17881 gen_load_gpr(t1
, rs
);
17882 gen_helper_fork(t0
, t1
);
17888 check_insn(ctx
, ASE_MT
);
17890 TCGv t0
= tcg_temp_new();
17892 gen_load_gpr(t0
, rs
);
17893 gen_helper_yield(t0
, cpu_env
, t0
);
17894 gen_store_gpr(t0
, rd
);
17899 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17900 decode_opc_special3_r6(env
, ctx
);
17902 decode_opc_special3_legacy(env
, ctx
);
17907 /* MIPS SIMD Architecture (MSA) */
17908 static inline int check_msa_access(DisasContext
*ctx
)
17910 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
17911 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
17912 generate_exception_end(ctx
, EXCP_RI
);
17916 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
17917 if (ctx
->insn_flags
& ASE_MSA
) {
17918 generate_exception_end(ctx
, EXCP_MSADIS
);
17921 generate_exception_end(ctx
, EXCP_RI
);
17928 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
17930 /* generates tcg ops to check if any element is 0 */
17931 /* Note this function only works with MSA_WRLEN = 128 */
17932 uint64_t eval_zero_or_big
= 0;
17933 uint64_t eval_big
= 0;
17934 TCGv_i64 t0
= tcg_temp_new_i64();
17935 TCGv_i64 t1
= tcg_temp_new_i64();
17938 eval_zero_or_big
= 0x0101010101010101ULL
;
17939 eval_big
= 0x8080808080808080ULL
;
17942 eval_zero_or_big
= 0x0001000100010001ULL
;
17943 eval_big
= 0x8000800080008000ULL
;
17946 eval_zero_or_big
= 0x0000000100000001ULL
;
17947 eval_big
= 0x8000000080000000ULL
;
17950 eval_zero_or_big
= 0x0000000000000001ULL
;
17951 eval_big
= 0x8000000000000000ULL
;
17954 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
17955 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
17956 tcg_gen_andi_i64(t0
, t0
, eval_big
);
17957 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
17958 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
17959 tcg_gen_andi_i64(t1
, t1
, eval_big
);
17960 tcg_gen_or_i64(t0
, t0
, t1
);
17961 /* if all bits are zero then all elements are not zero */
17962 /* if some bit is non-zero then some element is zero */
17963 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
17964 tcg_gen_trunc_i64_tl(tresult
, t0
);
17965 tcg_temp_free_i64(t0
);
17966 tcg_temp_free_i64(t1
);
17969 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
17971 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17972 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17973 int64_t s16
= (int16_t)ctx
->opcode
;
17975 check_msa_access(ctx
);
17977 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17978 generate_exception_end(ctx
, EXCP_RI
);
17985 TCGv_i64 t0
= tcg_temp_new_i64();
17986 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17987 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17988 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17989 tcg_gen_trunc_i64_tl(bcond
, t0
);
17990 tcg_temp_free_i64(t0
);
17997 gen_check_zero_element(bcond
, df
, wt
);
18003 gen_check_zero_element(bcond
, df
, wt
);
18004 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18008 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
18010 ctx
->hflags
|= MIPS_HFLAG_BC
;
18011 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18014 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18016 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18017 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18018 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18019 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18021 TCGv_i32 twd
= tcg_const_i32(wd
);
18022 TCGv_i32 tws
= tcg_const_i32(ws
);
18023 TCGv_i32 ti8
= tcg_const_i32(i8
);
18025 switch (MASK_MSA_I8(ctx
->opcode
)) {
18027 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18030 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18033 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18036 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18039 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18042 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18045 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18051 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18052 if (df
== DF_DOUBLE
) {
18053 generate_exception_end(ctx
, EXCP_RI
);
18055 TCGv_i32 tdf
= tcg_const_i32(df
);
18056 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18057 tcg_temp_free_i32(tdf
);
18062 MIPS_INVAL("MSA instruction");
18063 generate_exception_end(ctx
, EXCP_RI
);
18067 tcg_temp_free_i32(twd
);
18068 tcg_temp_free_i32(tws
);
18069 tcg_temp_free_i32(ti8
);
18072 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18074 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18075 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18076 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18077 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18078 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18079 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18081 TCGv_i32 tdf
= tcg_const_i32(df
);
18082 TCGv_i32 twd
= tcg_const_i32(wd
);
18083 TCGv_i32 tws
= tcg_const_i32(ws
);
18084 TCGv_i32 timm
= tcg_temp_new_i32();
18085 tcg_gen_movi_i32(timm
, u5
);
18087 switch (MASK_MSA_I5(ctx
->opcode
)) {
18089 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18092 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18094 case OPC_MAXI_S_df
:
18095 tcg_gen_movi_i32(timm
, s5
);
18096 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18098 case OPC_MAXI_U_df
:
18099 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18101 case OPC_MINI_S_df
:
18102 tcg_gen_movi_i32(timm
, s5
);
18103 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18105 case OPC_MINI_U_df
:
18106 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18109 tcg_gen_movi_i32(timm
, s5
);
18110 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18112 case OPC_CLTI_S_df
:
18113 tcg_gen_movi_i32(timm
, s5
);
18114 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18116 case OPC_CLTI_U_df
:
18117 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18119 case OPC_CLEI_S_df
:
18120 tcg_gen_movi_i32(timm
, s5
);
18121 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18123 case OPC_CLEI_U_df
:
18124 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18128 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18129 tcg_gen_movi_i32(timm
, s10
);
18130 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18134 MIPS_INVAL("MSA instruction");
18135 generate_exception_end(ctx
, EXCP_RI
);
18139 tcg_temp_free_i32(tdf
);
18140 tcg_temp_free_i32(twd
);
18141 tcg_temp_free_i32(tws
);
18142 tcg_temp_free_i32(timm
);
18145 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18147 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18148 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18149 uint32_t df
= 0, m
= 0;
18150 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18151 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18158 if ((dfm
& 0x40) == 0x00) {
18161 } else if ((dfm
& 0x60) == 0x40) {
18164 } else if ((dfm
& 0x70) == 0x60) {
18167 } else if ((dfm
& 0x78) == 0x70) {
18171 generate_exception_end(ctx
, EXCP_RI
);
18175 tdf
= tcg_const_i32(df
);
18176 tm
= tcg_const_i32(m
);
18177 twd
= tcg_const_i32(wd
);
18178 tws
= tcg_const_i32(ws
);
18180 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18182 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18185 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18188 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18191 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18194 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18197 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18199 case OPC_BINSLI_df
:
18200 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18202 case OPC_BINSRI_df
:
18203 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18206 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18209 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18212 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18215 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18218 MIPS_INVAL("MSA instruction");
18219 generate_exception_end(ctx
, EXCP_RI
);
18223 tcg_temp_free_i32(tdf
);
18224 tcg_temp_free_i32(tm
);
18225 tcg_temp_free_i32(twd
);
18226 tcg_temp_free_i32(tws
);
18229 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18231 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18232 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18233 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18234 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18235 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18237 TCGv_i32 tdf
= tcg_const_i32(df
);
18238 TCGv_i32 twd
= tcg_const_i32(wd
);
18239 TCGv_i32 tws
= tcg_const_i32(ws
);
18240 TCGv_i32 twt
= tcg_const_i32(wt
);
18242 switch (MASK_MSA_3R(ctx
->opcode
)) {
18244 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18247 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18250 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18253 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18255 case OPC_SUBS_S_df
:
18256 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18259 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18262 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18265 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18268 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18271 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18273 case OPC_ADDS_A_df
:
18274 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18276 case OPC_SUBS_U_df
:
18277 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18280 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18283 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18286 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18289 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18292 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18295 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18297 case OPC_ADDS_S_df
:
18298 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18300 case OPC_SUBSUS_U_df
:
18301 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18304 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18307 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18310 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18313 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18316 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18319 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18321 case OPC_ADDS_U_df
:
18322 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18324 case OPC_SUBSUU_S_df
:
18325 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18328 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18331 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18334 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18337 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18340 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18342 case OPC_ASUB_S_df
:
18343 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18346 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18349 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18352 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18355 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18358 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18361 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18363 case OPC_ASUB_U_df
:
18364 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18367 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18370 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18373 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18376 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18378 case OPC_AVER_S_df
:
18379 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18382 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18385 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18388 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18391 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18393 case OPC_AVER_U_df
:
18394 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18397 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18400 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18403 case OPC_DOTP_S_df
:
18404 case OPC_DOTP_U_df
:
18405 case OPC_DPADD_S_df
:
18406 case OPC_DPADD_U_df
:
18407 case OPC_DPSUB_S_df
:
18408 case OPC_HADD_S_df
:
18409 case OPC_DPSUB_U_df
:
18410 case OPC_HADD_U_df
:
18411 case OPC_HSUB_S_df
:
18412 case OPC_HSUB_U_df
:
18413 if (df
== DF_BYTE
) {
18414 generate_exception_end(ctx
, EXCP_RI
);
18417 switch (MASK_MSA_3R(ctx
->opcode
)) {
18418 case OPC_DOTP_S_df
:
18419 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18421 case OPC_DOTP_U_df
:
18422 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18424 case OPC_DPADD_S_df
:
18425 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18427 case OPC_DPADD_U_df
:
18428 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18430 case OPC_DPSUB_S_df
:
18431 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18433 case OPC_HADD_S_df
:
18434 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18436 case OPC_DPSUB_U_df
:
18437 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18439 case OPC_HADD_U_df
:
18440 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18442 case OPC_HSUB_S_df
:
18443 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18445 case OPC_HSUB_U_df
:
18446 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18451 MIPS_INVAL("MSA instruction");
18452 generate_exception_end(ctx
, EXCP_RI
);
18455 tcg_temp_free_i32(twd
);
18456 tcg_temp_free_i32(tws
);
18457 tcg_temp_free_i32(twt
);
18458 tcg_temp_free_i32(tdf
);
18461 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18463 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18464 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18465 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18466 TCGv telm
= tcg_temp_new();
18467 TCGv_i32 tsr
= tcg_const_i32(source
);
18468 TCGv_i32 tdt
= tcg_const_i32(dest
);
18470 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18472 gen_load_gpr(telm
, source
);
18473 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18476 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18477 gen_store_gpr(telm
, dest
);
18480 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18483 MIPS_INVAL("MSA instruction");
18484 generate_exception_end(ctx
, EXCP_RI
);
18488 tcg_temp_free(telm
);
18489 tcg_temp_free_i32(tdt
);
18490 tcg_temp_free_i32(tsr
);
18493 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18496 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18497 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18498 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18500 TCGv_i32 tws
= tcg_const_i32(ws
);
18501 TCGv_i32 twd
= tcg_const_i32(wd
);
18502 TCGv_i32 tn
= tcg_const_i32(n
);
18503 TCGv_i32 tdf
= tcg_const_i32(df
);
18505 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18507 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18509 case OPC_SPLATI_df
:
18510 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18513 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18515 case OPC_COPY_S_df
:
18516 case OPC_COPY_U_df
:
18517 case OPC_INSERT_df
:
18518 #if !defined(TARGET_MIPS64)
18519 /* Double format valid only for MIPS64 */
18520 if (df
== DF_DOUBLE
) {
18521 generate_exception_end(ctx
, EXCP_RI
);
18525 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18526 case OPC_COPY_S_df
:
18527 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18529 case OPC_COPY_U_df
:
18530 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18532 case OPC_INSERT_df
:
18533 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18538 MIPS_INVAL("MSA instruction");
18539 generate_exception_end(ctx
, EXCP_RI
);
18541 tcg_temp_free_i32(twd
);
18542 tcg_temp_free_i32(tws
);
18543 tcg_temp_free_i32(tn
);
18544 tcg_temp_free_i32(tdf
);
18547 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18549 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18550 uint32_t df
= 0, n
= 0;
18552 if ((dfn
& 0x30) == 0x00) {
18555 } else if ((dfn
& 0x38) == 0x20) {
18558 } else if ((dfn
& 0x3c) == 0x30) {
18561 } else if ((dfn
& 0x3e) == 0x38) {
18564 } else if (dfn
== 0x3E) {
18565 /* CTCMSA, CFCMSA, MOVE.V */
18566 gen_msa_elm_3e(env
, ctx
);
18569 generate_exception_end(ctx
, EXCP_RI
);
18573 gen_msa_elm_df(env
, ctx
, df
, n
);
18576 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18578 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18579 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18580 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18581 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18582 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18584 TCGv_i32 twd
= tcg_const_i32(wd
);
18585 TCGv_i32 tws
= tcg_const_i32(ws
);
18586 TCGv_i32 twt
= tcg_const_i32(wt
);
18587 TCGv_i32 tdf
= tcg_temp_new_i32();
18589 /* adjust df value for floating-point instruction */
18590 tcg_gen_movi_i32(tdf
, df
+ 2);
18592 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18594 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18597 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18600 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18603 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18606 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18612 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18615 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18618 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18621 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18624 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18627 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18630 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18633 tcg_gen_movi_i32(tdf
, df
+ 1);
18634 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18637 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18640 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18642 case OPC_MADD_Q_df
:
18643 tcg_gen_movi_i32(tdf
, df
+ 1);
18644 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18647 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18649 case OPC_MSUB_Q_df
:
18650 tcg_gen_movi_i32(tdf
, df
+ 1);
18651 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18654 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18657 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18660 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18663 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18666 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18669 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18672 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18675 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18678 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18681 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18684 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18687 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18690 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18692 case OPC_MULR_Q_df
:
18693 tcg_gen_movi_i32(tdf
, df
+ 1);
18694 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18697 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18699 case OPC_FMIN_A_df
:
18700 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18702 case OPC_MADDR_Q_df
:
18703 tcg_gen_movi_i32(tdf
, df
+ 1);
18704 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18707 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18710 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18712 case OPC_MSUBR_Q_df
:
18713 tcg_gen_movi_i32(tdf
, df
+ 1);
18714 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18717 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18719 case OPC_FMAX_A_df
:
18720 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18723 MIPS_INVAL("MSA instruction");
18724 generate_exception_end(ctx
, EXCP_RI
);
18728 tcg_temp_free_i32(twd
);
18729 tcg_temp_free_i32(tws
);
18730 tcg_temp_free_i32(twt
);
18731 tcg_temp_free_i32(tdf
);
18734 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18736 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18737 (op & (0x7 << 18)))
18738 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18739 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18740 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18741 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18742 TCGv_i32 twd
= tcg_const_i32(wd
);
18743 TCGv_i32 tws
= tcg_const_i32(ws
);
18744 TCGv_i32 twt
= tcg_const_i32(wt
);
18745 TCGv_i32 tdf
= tcg_const_i32(df
);
18747 switch (MASK_MSA_2R(ctx
->opcode
)) {
18749 #if !defined(TARGET_MIPS64)
18750 /* Double format valid only for MIPS64 */
18751 if (df
== DF_DOUBLE
) {
18752 generate_exception_end(ctx
, EXCP_RI
);
18756 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18759 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18762 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18765 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18768 MIPS_INVAL("MSA instruction");
18769 generate_exception_end(ctx
, EXCP_RI
);
18773 tcg_temp_free_i32(twd
);
18774 tcg_temp_free_i32(tws
);
18775 tcg_temp_free_i32(twt
);
18776 tcg_temp_free_i32(tdf
);
18779 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18781 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18782 (op & (0xf << 17)))
18783 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18784 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18785 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18786 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18787 TCGv_i32 twd
= tcg_const_i32(wd
);
18788 TCGv_i32 tws
= tcg_const_i32(ws
);
18789 TCGv_i32 twt
= tcg_const_i32(wt
);
18790 /* adjust df value for floating-point instruction */
18791 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18793 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18794 case OPC_FCLASS_df
:
18795 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18797 case OPC_FTRUNC_S_df
:
18798 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18800 case OPC_FTRUNC_U_df
:
18801 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18804 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18806 case OPC_FRSQRT_df
:
18807 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18810 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18813 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18816 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18818 case OPC_FEXUPL_df
:
18819 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18821 case OPC_FEXUPR_df
:
18822 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18825 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18828 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18830 case OPC_FTINT_S_df
:
18831 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18833 case OPC_FTINT_U_df
:
18834 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18836 case OPC_FFINT_S_df
:
18837 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18839 case OPC_FFINT_U_df
:
18840 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18844 tcg_temp_free_i32(twd
);
18845 tcg_temp_free_i32(tws
);
18846 tcg_temp_free_i32(twt
);
18847 tcg_temp_free_i32(tdf
);
18850 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18852 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18853 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18854 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18855 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18856 TCGv_i32 twd
= tcg_const_i32(wd
);
18857 TCGv_i32 tws
= tcg_const_i32(ws
);
18858 TCGv_i32 twt
= tcg_const_i32(wt
);
18860 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18862 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18865 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18868 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18871 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18874 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18877 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18880 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18883 MIPS_INVAL("MSA instruction");
18884 generate_exception_end(ctx
, EXCP_RI
);
18888 tcg_temp_free_i32(twd
);
18889 tcg_temp_free_i32(tws
);
18890 tcg_temp_free_i32(twt
);
18893 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18895 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18903 gen_msa_vec_v(env
, ctx
);
18906 gen_msa_2r(env
, ctx
);
18909 gen_msa_2rf(env
, ctx
);
18912 MIPS_INVAL("MSA instruction");
18913 generate_exception_end(ctx
, EXCP_RI
);
18918 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18920 uint32_t opcode
= ctx
->opcode
;
18921 check_insn(ctx
, ASE_MSA
);
18922 check_msa_access(ctx
);
18924 switch (MASK_MSA_MINOR(opcode
)) {
18925 case OPC_MSA_I8_00
:
18926 case OPC_MSA_I8_01
:
18927 case OPC_MSA_I8_02
:
18928 gen_msa_i8(env
, ctx
);
18930 case OPC_MSA_I5_06
:
18931 case OPC_MSA_I5_07
:
18932 gen_msa_i5(env
, ctx
);
18934 case OPC_MSA_BIT_09
:
18935 case OPC_MSA_BIT_0A
:
18936 gen_msa_bit(env
, ctx
);
18938 case OPC_MSA_3R_0D
:
18939 case OPC_MSA_3R_0E
:
18940 case OPC_MSA_3R_0F
:
18941 case OPC_MSA_3R_10
:
18942 case OPC_MSA_3R_11
:
18943 case OPC_MSA_3R_12
:
18944 case OPC_MSA_3R_13
:
18945 case OPC_MSA_3R_14
:
18946 case OPC_MSA_3R_15
:
18947 gen_msa_3r(env
, ctx
);
18950 gen_msa_elm(env
, ctx
);
18952 case OPC_MSA_3RF_1A
:
18953 case OPC_MSA_3RF_1B
:
18954 case OPC_MSA_3RF_1C
:
18955 gen_msa_3rf(env
, ctx
);
18958 gen_msa_vec(env
, ctx
);
18969 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18970 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18971 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18972 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18974 TCGv_i32 twd
= tcg_const_i32(wd
);
18975 TCGv taddr
= tcg_temp_new();
18976 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18978 switch (MASK_MSA_MINOR(opcode
)) {
18980 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18983 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18986 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18989 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18992 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18995 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18998 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19001 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19005 tcg_temp_free_i32(twd
);
19006 tcg_temp_free(taddr
);
19010 MIPS_INVAL("MSA instruction");
19011 generate_exception_end(ctx
, EXCP_RI
);
19017 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19020 int rs
, rt
, rd
, sa
;
19024 /* make sure instructions are on a word boundary */
19025 if (ctx
->pc
& 0x3) {
19026 env
->CP0_BadVAddr
= ctx
->pc
;
19027 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19031 /* Handle blikely not taken case */
19032 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19033 TCGLabel
*l1
= gen_new_label();
19035 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19036 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19037 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19041 op
= MASK_OP_MAJOR(ctx
->opcode
);
19042 rs
= (ctx
->opcode
>> 21) & 0x1f;
19043 rt
= (ctx
->opcode
>> 16) & 0x1f;
19044 rd
= (ctx
->opcode
>> 11) & 0x1f;
19045 sa
= (ctx
->opcode
>> 6) & 0x1f;
19046 imm
= (int16_t)ctx
->opcode
;
19049 decode_opc_special(env
, ctx
);
19052 decode_opc_special2_legacy(env
, ctx
);
19055 decode_opc_special3(env
, ctx
);
19058 op1
= MASK_REGIMM(ctx
->opcode
);
19060 case OPC_BLTZL
: /* REGIMM branches */
19064 check_insn(ctx
, ISA_MIPS2
);
19065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19069 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19073 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19075 /* OPC_NAL, OPC_BAL */
19076 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19078 generate_exception_end(ctx
, EXCP_RI
);
19081 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19084 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19086 check_insn(ctx
, ISA_MIPS2
);
19087 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19088 gen_trap(ctx
, op1
, rs
, -1, imm
);
19091 check_insn(ctx
, ISA_MIPS32R6
);
19092 generate_exception_end(ctx
, EXCP_RI
);
19095 check_insn(ctx
, ISA_MIPS32R2
);
19096 /* Break the TB to be able to sync copied instructions
19098 ctx
->bstate
= BS_STOP
;
19100 case OPC_BPOSGE32
: /* MIPS DSP branch */
19101 #if defined(TARGET_MIPS64)
19105 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19107 #if defined(TARGET_MIPS64)
19109 check_insn(ctx
, ISA_MIPS32R6
);
19110 check_mips_64(ctx
);
19112 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19116 check_insn(ctx
, ISA_MIPS32R6
);
19117 check_mips_64(ctx
);
19119 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19123 default: /* Invalid */
19124 MIPS_INVAL("regimm");
19125 generate_exception_end(ctx
, EXCP_RI
);
19130 check_cp0_enabled(ctx
);
19131 op1
= MASK_CP0(ctx
->opcode
);
19139 #if defined(TARGET_MIPS64)
19143 #ifndef CONFIG_USER_ONLY
19144 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19145 #endif /* !CONFIG_USER_ONLY */
19147 case OPC_C0_FIRST
... OPC_C0_LAST
:
19148 #ifndef CONFIG_USER_ONLY
19149 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19150 #endif /* !CONFIG_USER_ONLY */
19153 #ifndef CONFIG_USER_ONLY
19156 TCGv t0
= tcg_temp_new();
19158 op2
= MASK_MFMC0(ctx
->opcode
);
19161 check_insn(ctx
, ASE_MT
);
19162 gen_helper_dmt(t0
);
19163 gen_store_gpr(t0
, rt
);
19166 check_insn(ctx
, ASE_MT
);
19167 gen_helper_emt(t0
);
19168 gen_store_gpr(t0
, rt
);
19171 check_insn(ctx
, ASE_MT
);
19172 gen_helper_dvpe(t0
, cpu_env
);
19173 gen_store_gpr(t0
, rt
);
19176 check_insn(ctx
, ASE_MT
);
19177 gen_helper_evpe(t0
, cpu_env
);
19178 gen_store_gpr(t0
, rt
);
19181 check_insn(ctx
, ISA_MIPS32R6
);
19183 gen_helper_dvp(t0
, cpu_env
);
19184 gen_store_gpr(t0
, rt
);
19188 check_insn(ctx
, ISA_MIPS32R6
);
19190 gen_helper_evp(t0
, cpu_env
);
19191 gen_store_gpr(t0
, rt
);
19195 check_insn(ctx
, ISA_MIPS32R2
);
19196 save_cpu_state(ctx
, 1);
19197 gen_helper_di(t0
, cpu_env
);
19198 gen_store_gpr(t0
, rt
);
19199 /* Stop translation as we may have switched
19200 the execution mode. */
19201 ctx
->bstate
= BS_STOP
;
19204 check_insn(ctx
, ISA_MIPS32R2
);
19205 save_cpu_state(ctx
, 1);
19206 gen_helper_ei(t0
, cpu_env
);
19207 gen_store_gpr(t0
, rt
);
19208 /* Stop translation as we may have switched
19209 the execution mode. */
19210 ctx
->bstate
= BS_STOP
;
19212 default: /* Invalid */
19213 MIPS_INVAL("mfmc0");
19214 generate_exception_end(ctx
, EXCP_RI
);
19219 #endif /* !CONFIG_USER_ONLY */
19222 check_insn(ctx
, ISA_MIPS32R2
);
19223 gen_load_srsgpr(rt
, rd
);
19226 check_insn(ctx
, ISA_MIPS32R2
);
19227 gen_store_srsgpr(rt
, rd
);
19231 generate_exception_end(ctx
, EXCP_RI
);
19235 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19236 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19237 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19238 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19241 /* Arithmetic with immediate opcode */
19242 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19246 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19248 case OPC_SLTI
: /* Set on less than with immediate opcode */
19250 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19252 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19253 case OPC_LUI
: /* OPC_AUI */
19256 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19258 case OPC_J
... OPC_JAL
: /* Jump */
19259 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19260 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19263 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19264 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19266 generate_exception_end(ctx
, EXCP_RI
);
19269 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19270 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19273 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19276 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19277 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19279 generate_exception_end(ctx
, EXCP_RI
);
19282 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19283 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19286 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19289 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19292 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19294 check_insn(ctx
, ISA_MIPS32R6
);
19295 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19296 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19299 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19302 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19304 check_insn(ctx
, ISA_MIPS32R6
);
19305 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19306 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19311 check_insn(ctx
, ISA_MIPS2
);
19312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19316 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19318 case OPC_LL
: /* Load and stores */
19319 check_insn(ctx
, ISA_MIPS2
);
19323 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19325 case OPC_LB
... OPC_LH
:
19326 case OPC_LW
... OPC_LHU
:
19327 gen_ld(ctx
, op
, rt
, rs
, imm
);
19331 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19333 case OPC_SB
... OPC_SH
:
19335 gen_st(ctx
, op
, rt
, rs
, imm
);
19338 check_insn(ctx
, ISA_MIPS2
);
19339 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19340 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19343 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19344 check_cp0_enabled(ctx
);
19345 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19346 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19347 gen_cache_operation(ctx
, rt
, rs
, imm
);
19349 /* Treat as NOP. */
19352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19353 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19354 /* Treat as NOP. */
19357 /* Floating point (COP1). */
19362 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19366 op1
= MASK_CP1(ctx
->opcode
);
19371 check_cp1_enabled(ctx
);
19372 check_insn(ctx
, ISA_MIPS32R2
);
19377 check_cp1_enabled(ctx
);
19378 gen_cp1(ctx
, op1
, rt
, rd
);
19380 #if defined(TARGET_MIPS64)
19383 check_cp1_enabled(ctx
);
19384 check_insn(ctx
, ISA_MIPS3
);
19385 check_mips_64(ctx
);
19386 gen_cp1(ctx
, op1
, rt
, rd
);
19389 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19390 check_cp1_enabled(ctx
);
19391 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19393 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19398 check_insn(ctx
, ASE_MIPS3D
);
19399 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19400 (rt
>> 2) & 0x7, imm
<< 2);
19404 check_cp1_enabled(ctx
);
19405 check_insn(ctx
, ISA_MIPS32R6
);
19406 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19410 check_cp1_enabled(ctx
);
19411 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19413 check_insn(ctx
, ASE_MIPS3D
);
19416 check_cp1_enabled(ctx
);
19417 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19418 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19419 (rt
>> 2) & 0x7, imm
<< 2);
19426 check_cp1_enabled(ctx
);
19427 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19433 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19434 check_cp1_enabled(ctx
);
19435 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19437 case R6_OPC_CMP_AF_S
:
19438 case R6_OPC_CMP_UN_S
:
19439 case R6_OPC_CMP_EQ_S
:
19440 case R6_OPC_CMP_UEQ_S
:
19441 case R6_OPC_CMP_LT_S
:
19442 case R6_OPC_CMP_ULT_S
:
19443 case R6_OPC_CMP_LE_S
:
19444 case R6_OPC_CMP_ULE_S
:
19445 case R6_OPC_CMP_SAF_S
:
19446 case R6_OPC_CMP_SUN_S
:
19447 case R6_OPC_CMP_SEQ_S
:
19448 case R6_OPC_CMP_SEUQ_S
:
19449 case R6_OPC_CMP_SLT_S
:
19450 case R6_OPC_CMP_SULT_S
:
19451 case R6_OPC_CMP_SLE_S
:
19452 case R6_OPC_CMP_SULE_S
:
19453 case R6_OPC_CMP_OR_S
:
19454 case R6_OPC_CMP_UNE_S
:
19455 case R6_OPC_CMP_NE_S
:
19456 case R6_OPC_CMP_SOR_S
:
19457 case R6_OPC_CMP_SUNE_S
:
19458 case R6_OPC_CMP_SNE_S
:
19459 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19461 case R6_OPC_CMP_AF_D
:
19462 case R6_OPC_CMP_UN_D
:
19463 case R6_OPC_CMP_EQ_D
:
19464 case R6_OPC_CMP_UEQ_D
:
19465 case R6_OPC_CMP_LT_D
:
19466 case R6_OPC_CMP_ULT_D
:
19467 case R6_OPC_CMP_LE_D
:
19468 case R6_OPC_CMP_ULE_D
:
19469 case R6_OPC_CMP_SAF_D
:
19470 case R6_OPC_CMP_SUN_D
:
19471 case R6_OPC_CMP_SEQ_D
:
19472 case R6_OPC_CMP_SEUQ_D
:
19473 case R6_OPC_CMP_SLT_D
:
19474 case R6_OPC_CMP_SULT_D
:
19475 case R6_OPC_CMP_SLE_D
:
19476 case R6_OPC_CMP_SULE_D
:
19477 case R6_OPC_CMP_OR_D
:
19478 case R6_OPC_CMP_UNE_D
:
19479 case R6_OPC_CMP_NE_D
:
19480 case R6_OPC_CMP_SOR_D
:
19481 case R6_OPC_CMP_SUNE_D
:
19482 case R6_OPC_CMP_SNE_D
:
19483 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19486 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19487 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19492 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19507 check_insn(ctx
, ASE_MSA
);
19508 gen_msa_branch(env
, ctx
, op1
);
19512 generate_exception_end(ctx
, EXCP_RI
);
19517 /* Compact branches [R6] and COP2 [non-R6] */
19518 case OPC_BC
: /* OPC_LWC2 */
19519 case OPC_BALC
: /* OPC_SWC2 */
19520 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19521 /* OPC_BC, OPC_BALC */
19522 gen_compute_compact_branch(ctx
, op
, 0, 0,
19523 sextract32(ctx
->opcode
<< 2, 0, 28));
19525 /* OPC_LWC2, OPC_SWC2 */
19526 /* COP2: Not implemented. */
19527 generate_exception_err(ctx
, EXCP_CpU
, 2);
19530 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19531 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19532 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19534 /* OPC_BEQZC, OPC_BNEZC */
19535 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19536 sextract32(ctx
->opcode
<< 2, 0, 23));
19538 /* OPC_JIC, OPC_JIALC */
19539 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19542 /* OPC_LWC2, OPC_SWC2 */
19543 /* COP2: Not implemented. */
19544 generate_exception_err(ctx
, EXCP_CpU
, 2);
19548 check_insn(ctx
, INSN_LOONGSON2F
);
19549 /* Note that these instructions use different fields. */
19550 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19554 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19555 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19556 check_cp1_enabled(ctx
);
19557 op1
= MASK_CP3(ctx
->opcode
);
19561 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19567 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19568 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19571 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19572 /* Treat as NOP. */
19575 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19589 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19590 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19594 generate_exception_end(ctx
, EXCP_RI
);
19598 generate_exception_err(ctx
, EXCP_CpU
, 1);
19602 #if defined(TARGET_MIPS64)
19603 /* MIPS64 opcodes */
19604 case OPC_LDL
... OPC_LDR
:
19606 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19610 check_insn(ctx
, ISA_MIPS3
);
19611 check_mips_64(ctx
);
19612 gen_ld(ctx
, op
, rt
, rs
, imm
);
19614 case OPC_SDL
... OPC_SDR
:
19615 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19618 check_insn(ctx
, ISA_MIPS3
);
19619 check_mips_64(ctx
);
19620 gen_st(ctx
, op
, rt
, rs
, imm
);
19623 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19624 check_insn(ctx
, ISA_MIPS3
);
19625 check_mips_64(ctx
);
19626 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19628 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19629 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19630 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19631 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19634 check_insn(ctx
, ISA_MIPS3
);
19635 check_mips_64(ctx
);
19636 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19640 check_insn(ctx
, ISA_MIPS3
);
19641 check_mips_64(ctx
);
19642 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19645 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19646 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19647 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19649 MIPS_INVAL("major opcode");
19650 generate_exception_end(ctx
, EXCP_RI
);
19654 case OPC_DAUI
: /* OPC_JALX */
19655 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19656 #if defined(TARGET_MIPS64)
19658 check_mips_64(ctx
);
19660 generate_exception(ctx
, EXCP_RI
);
19661 } else if (rt
!= 0) {
19662 TCGv t0
= tcg_temp_new();
19663 gen_load_gpr(t0
, rs
);
19664 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19668 generate_exception_end(ctx
, EXCP_RI
);
19669 MIPS_INVAL("major opcode");
19673 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19674 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19675 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19678 case OPC_MSA
: /* OPC_MDMX */
19679 /* MDMX: Not implemented. */
19683 check_insn(ctx
, ISA_MIPS32R6
);
19684 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19686 default: /* Invalid */
19687 MIPS_INVAL("major opcode");
19688 generate_exception_end(ctx
, EXCP_RI
);
19693 void gen_intermediate_code(CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19695 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19696 CPUState
*cs
= CPU(cpu
);
19698 target_ulong pc_start
;
19699 target_ulong next_page_start
;
19706 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19709 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19710 ctx
.insn_flags
= env
->insn_flags
;
19711 ctx
.CP0_Config1
= env
->CP0_Config1
;
19713 ctx
.bstate
= BS_NONE
;
19715 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19716 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19717 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19718 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19719 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19720 ctx
.PAMask
= env
->PAMask
;
19721 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19722 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19723 ctx
.cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
19724 /* Restore delay slot state from the tb context. */
19725 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19726 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19727 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19728 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19729 ctx
.vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
19730 restore_cpu_state(env
, &ctx
);
19731 #ifdef CONFIG_USER_ONLY
19732 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19734 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19736 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19737 MO_UNALN
: MO_ALIGN
;
19739 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19740 if (max_insns
== 0) {
19741 max_insns
= CF_COUNT_MASK
;
19743 if (max_insns
> TCG_MAX_INSNS
) {
19744 max_insns
= TCG_MAX_INSNS
;
19747 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19749 while (ctx
.bstate
== BS_NONE
) {
19750 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
19753 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
19754 save_cpu_state(&ctx
, 1);
19755 ctx
.bstate
= BS_BRANCH
;
19756 gen_helper_raise_exception_debug(cpu_env
);
19757 /* The address covered by the breakpoint must be included in
19758 [tb->pc, tb->pc + tb->size) in order to for it to be
19759 properly cleared -- thus we increment the PC here so that
19760 the logic setting tb->size below does the right thing. */
19762 goto done_generating
;
19765 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
19769 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19770 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19771 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19773 decode_opc(env
, &ctx
);
19774 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19775 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19776 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19777 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19778 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19779 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19781 generate_exception_end(&ctx
, EXCP_RI
);
19785 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19786 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19787 MIPS_HFLAG_FBNSLOT
))) {
19788 /* force to generate branch as there is neither delay nor
19792 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19793 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19794 /* Force to generate branch as microMIPS R6 doesn't restrict
19795 branches in the forbidden slot. */
19800 gen_branch(&ctx
, insn_bytes
);
19802 ctx
.pc
+= insn_bytes
;
19804 /* Execute a branch and its delay slot as a single instruction.
19805 This is what GDB expects and is consistent with what the
19806 hardware does (e.g. if a delay slot instruction faults, the
19807 reported PC is the PC of the branch). */
19808 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
19812 if (ctx
.pc
>= next_page_start
) {
19816 if (tcg_op_buf_full()) {
19820 if (num_insns
>= max_insns
)
19826 if (tb
->cflags
& CF_LAST_IO
) {
19829 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
19830 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
19831 gen_helper_raise_exception_debug(cpu_env
);
19833 switch (ctx
.bstate
) {
19835 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19838 save_cpu_state(&ctx
, 0);
19839 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19842 tcg_gen_exit_tb(0);
19850 gen_tb_end(tb
, num_insns
);
19852 tb
->size
= ctx
.pc
- pc_start
;
19853 tb
->icount
= num_insns
;
19857 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
19858 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
19859 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
19865 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
19869 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
19871 #define printfpr(fp) \
19874 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19875 " fd:%13g fs:%13g psu: %13g\n", \
19876 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
19877 (double)(fp)->fd, \
19878 (double)(fp)->fs[FP_ENDIAN_IDX], \
19879 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
19882 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
19883 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
19884 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19885 " fd:%13g fs:%13g psu:%13g\n", \
19886 tmp.w[FP_ENDIAN_IDX], tmp.d, \
19888 (double)tmp.fs[FP_ENDIAN_IDX], \
19889 (double)tmp.fs[!FP_ENDIAN_IDX]); \
19894 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
19895 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
19896 get_float_exception_flags(&env
->active_fpu
.fp_status
));
19897 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
19898 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
19899 printfpr(&env
->active_fpu
.fpr
[i
]);
19905 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
19908 MIPSCPU
*cpu
= MIPS_CPU(cs
);
19909 CPUMIPSState
*env
= &cpu
->env
;
19912 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
19913 " LO=0x" TARGET_FMT_lx
" ds %04x "
19914 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
19915 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
19916 env
->hflags
, env
->btarget
, env
->bcond
);
19917 for (i
= 0; i
< 32; i
++) {
19919 cpu_fprintf(f
, "GPR%02d:", i
);
19920 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
19922 cpu_fprintf(f
, "\n");
19925 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
19926 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
19927 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
19929 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
19930 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
19931 env
->CP0_Config2
, env
->CP0_Config3
);
19932 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
19933 env
->CP0_Config4
, env
->CP0_Config5
);
19934 if (env
->hflags
& MIPS_HFLAG_FPU
)
19935 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
/*
 * One-time registration of the TCG global variables that mirror the
 * MIPS architectural state in CPUMIPSState.
 *
 * Each tcg_global_mem_new*() call binds a named TCG global to a fixed
 * offset inside CPUMIPSState (relative to cpu_env), so that generated
 * code can read/write guest registers directly.  Registered globals:
 * GPRs 1..31 (GPR 0 is hardwired zero, hence TCGV_UNUSED), the MSA
 * 128-bit vector registers as pairs of 64-bit halves, PC, the DSP
 * HI/LO accumulator pairs, DSPControl, branch state (bcond/btarget),
 * hflags and the FPU control registers fcr0/fcr31.
 *
 * NOTE(review): the extraction dropped a few lines (the static
 * "inited" guard and several string-argument continuation lines);
 * reconstructed per upstream QEMU — verify against the original file.
 */
void mips_tcg_init(void)
{
    int i;
    static int inited;

    /* Initialize various static tables. */
    if (inited)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    /* $0 is the constant zero register; it never gets a TCG global. */
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    /* One HI/LO pair per DSP accumulator (HI[0]/LO[0] are the classic
       multiply/divide result registers). */
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState,
                                              active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_fpu.fcr31),
                                       "fcr31");

    inited = 1;
}
19997 #include "translate_init.c"
/*
 * Create and realize a MIPS CPU object for the model named by
 * @cpu_model.
 *
 * Looks the model up in the static definition table; on an unknown
 * name, returns NULL.  Otherwise instantiates a QOM MIPS_CPU object,
 * attaches the model definition, initializes the MMU (system emulation
 * only), FPU and MVP sub-state from it, and realizes the object
 * (which triggers the CPU reset path).
 *
 * Returns: the new MIPSCPU, or NULL if @cpu_model is not recognized.
 *
 * NOTE(review): the extraction dropped the local declarations and the
 * NULL-check after the model lookup; reconstructed per upstream QEMU.
 */
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    /* TLB/MMU state only exists in full-system emulation. */
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    /* Realizing the object triggers cpu_state_reset() via the QOM
       realize hook. */
    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}
20023 bool cpu_supports_cps_smp(const char *cpu_model
)
20025 const mips_def_t
*def
= cpu_mips_find_by_name(cpu_model
);
20030 return (def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
/*
 * Reset @env to its architectural power-on state.
 *
 * First copies the model-invariant defaults (PRid, Config0-7, the
 * various read/write bitmasks, segment/physical-address widths, FPU
 * control defaults, MSAIR, insn_flags) out of env->cpu_model.  Then:
 *  - user-mode emulation: puts the CPU straight into user mode with
 *    CU1/MX/FR and the RDHWR registers enabled as the model allows;
 *  - system emulation: sets up the boot state (BEV|ERL, PC at the
 *    reset vector 0xBFC00000, ErrorEPC, EBase, TLB, watchpoints,
 *    Count, and — for MT-capable models — the VPE/TC startup state).
 * Finally recomputes the cached hflags and FPU modes and clears any
 * pending exception.
 *
 * NOTE(review): reconstructed from a lossy extraction — some closing
 * braces, #else/#endif lines and small statements (cs->halted
 * updates, msa_reset call, NULL checks) were missing and are restored
 * per upstream QEMU; verify against the original file.
 */
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    /* Reflect host-configured guest endianness in Config0.BE. */
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    /* The writable-bit mask is stored pre-shifted by the model's
       LLAddr shift so stores can apply it directly. */
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        /* MIPS III and up: include the xkphys/xkseg selector bits. */
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    /* Linux-user: start directly in user mode. */
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode.  */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        /* FPU present: allow coprocessor 1 access. */
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        /* DSP ASE present: enable MX. */
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is
       writable.  */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    /* Architectural reset vector (kseg1). */
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        /* NOTE(review): KVM guests use a different EBase region
           (0x40000000) than TCG (kseg0) — confirm against KVM MIPS
           documentation. */
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        /* Default Coherence Manager GCR base (physical >> 4). */
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            /* Watch[i+1] exists while bit 31 of WatchHi[i] is set. */
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active.  */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled.  */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted.  */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active.  */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    /* Re-derive cached translation state from the fresh CP0 values. */
    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
20204 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20205 target_ulong
*data
)
20207 env
->active_tc
.PC
= data
[0];
20208 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20209 env
->hflags
|= data
[1];
20210 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20211 case MIPS_HFLAG_BR
:
20213 case MIPS_HFLAG_BC
:
20214 case MIPS_HFLAG_BL
:
20216 env
->btarget
= data
[2];