/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"
#include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
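/*
 * A MIPS instruction word is 32 bits; the major opcode lives in bits 31..26,
 * which is what MASK_OP_MAJOR extracts.  For example, 0x24430001 has major
 * opcode 0x09 (ADDIU), i.e. "addiu v1, v0, 1".  All of the OPC_* enumerators
 * below are pre-shifted so they can be compared directly against the masked
 * instruction word.
 */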
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
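/*
 * Release 6 packs several PC-relative operations into major opcode 0x3B:
 * ADDIUPC/LWPC/LWUPC are selected by bits 20..19, while AUIPC/ALUIPC need the
 * full 5-bit field in bits 20..16, hence the two different masks above.
 */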
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)
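/*
 * For the SPECIAL major opcode the operation is selected by the 6-bit
 * function field in bits 5..0, so the mask keeps both the major opcode and
 * those low bits.
 */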
enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))
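/*
 * The R6 forms reuse the legacy MULT/DIV function codes; the value in the
 * shift-amount field (bits 10..6) picks the variant: 2 selects the low
 * product / quotient (MUL, DIV), 3 selects the high product / remainder
 * (MUH, MOD).
 */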
enum {
    R6_OPC_MUL   = OPC_MULT | (2 << 6),
    R6_OPC_MUH   = OPC_MULT | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV | (2 << 6),
    R6_OPC_MOD   = OPC_DIV | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL  = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH  = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV  = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD  = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ   = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO   = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ  = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO  = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA      = 0x05 | OPC_SPECIAL,
    OPC_DLSA     = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))
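/* For REGIMM the rt field (bits 20..16) selects the operation; rs remains
   the source register operand. */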
enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF  = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL    = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC    = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD   = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD   = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
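/*
 * The ADDUH.QB sub-class shares SPECIAL3 function code 0x18 with the
 * Loongson OPC_MULT_G_2E, so it is expressed as a #define alias rather than
 * a second enumerator; the two encodings are presumably told apart later by
 * the 5-bit sub-opcode in bits 10..6 and by which CPU features are enabled.
 */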
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))
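/* For COP0 the rs field (bits 25..21) selects the operation; rs values with
   the CO bit set (0x10..0x1F) fall through to the C0 function-encoded group
   below (TLB ops, ERET, WAIT, ...). */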
enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)
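/*
 * The MFMC0 group (DI/EI, DMT/EMT, DVPE/EVPE, DVP/EVP) is distinguished by
 * the sc bit (bit 5) plus the rd and low function bits, which is why the
 * whole low halfword is kept in the mask.
 */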
enum {
    OPC_DMT  = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT  = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI   = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI   = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP  = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP  = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR    = 0x01 | OPC_C0,
    OPC_TLBWI   = 0x02 | OPC_C0,
    OPC_TLBINV  = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR   = 0x06 | OPC_C0,
    OPC_TLBP    = 0x08 | OPC_C0,
    OPC_RFE     = 0x10 | OPC_C0,
    OPC_ERET    = 0x18 | OPC_C0,
    OPC_DERET   = 0x1F | OPC_C0,
    OPC_WAIT    = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1    = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1   = (0x01 << 21) | OPC_CP1,
    OPC_CFC1    = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1   = (0x03 << 21) | OPC_CP1,
    OPC_MTC1    = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1   = (0x05 << 21) | OPC_CP1,
    OPC_CTC1    = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1   = (0x07 << 21) | OPC_CP1,
    OPC_BC1     = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V    = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V   = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT   = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT   = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT   = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT   = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT   = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT   = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT  = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ  = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ  = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B    = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H    = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W    = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D    = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B   = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H   = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W   = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D   = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))
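/* BC1 branches on an FP condition code: the cc number sits in bits 20..18
   and bits 17..16 (nd/tf) pick the false/true and "likely" variants. */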
enum {
    OPC_BC1F  = (0x00 << 16) | OPC_BC1,
    OPC_BC1T  = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};
enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};
enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2   = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2  = (0x01 << 21) | OPC_CP2,
    OPC_CFC2   = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2  = (0x03 << 21) | OPC_CP2,
    OPC_MTC2   = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2  = (0x05 << 21) | OPC_CP2,
    OPC_CTC2   = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2  = (0x07 << 21) | OPC_CP2,
    OPC_BC2    = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
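/*
 * Loongson multimedia (LMI) instructions are encoded in the COP2 major
 * opcode space: the rs field (bits 25..21) and the low 5 function bits
 * together select the operation, so MASK_LMI keeps both.
 */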
enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op)    (MASK_OP_MAJOR(op) | (op & 0x3F))
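/*
 * MSA shares the OPC_MDMX major opcode (see OPC_MSA above).  The low 6 bits
 * select a minor-opcode group (I8, I5, BIT, 3R, ELM, 3RF, VEC, MI10); the
 * data format and the concrete operation are then decoded from the
 * higher-numbered fields by the per-group masks below.
 */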
enum {
    OPC_MSA_I8_00   = 0x00 | OPC_MSA,
    OPC_MSA_I8_01   = 0x01 | OPC_MSA,
    OPC_MSA_I8_02   = 0x02 | OPC_MSA,
    OPC_MSA_I5_06   = 0x06 | OPC_MSA,
    OPC_MSA_I5_07   = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09  = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A  = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D   = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E   = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F   = 0x0F | OPC_MSA,
    OPC_MSA_3R_10   = 0x10 | OPC_MSA,
    OPC_MSA_3R_11   = 0x11 | OPC_MSA,
    OPC_MSA_3R_12   = 0x12 | OPC_MSA,
    OPC_MSA_3R_13   = 0x13 | OPC_MSA,
    OPC_MSA_3R_14   = 0x14 | OPC_MSA,
    OPC_MSA_3R_15   = 0x15 | OPC_MSA,
    OPC_MSA_ELM     = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A  = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B  = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C  = 0x1C | OPC_MSA,
    OPC_MSA_VEC     = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B        = (0x20) | OPC_MSA,
    OPC_LD_H        = (0x21) | OPC_MSA,
    OPC_LD_W        = (0x22) | OPC_MSA,
    OPC_LD_D        = (0x23) | OPC_MSA,
    OPC_ST_B        = (0x24) | OPC_MSA,
    OPC_ST_H        = (0x25) | OPC_MSA,
    OPC_ST_W        = (0x26) | OPC_MSA,
    OPC_ST_D        = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df    = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df     = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df    = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df   = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df   = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df   = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df   = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df   = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df   = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df   = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df   = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df      = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B      = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B     = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B       = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B       = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B      = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H       = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B      = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B     = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W       = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B      = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V       = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V        = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V       = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V       = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V      = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V       = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V      = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R      = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF     = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df     = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df     = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df     = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df     = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df     = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA      = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df   = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA      = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df   = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V      = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df   = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df   = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df    = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df     = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df     = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df     = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df     = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df     = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df     = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df     = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df    = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df    = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df     = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df     = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df     = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df    = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df    = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df    = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df    = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df   = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df     = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df   = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df    = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df    = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df     = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df    = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df     = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df     = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df     = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df      = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df    = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df    = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df     = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df     = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df     = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df   = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df    = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df   = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df  = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df     = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df     = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df  = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df    = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df   = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df     = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df    = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df     = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df    = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df     = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df    = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df    = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df    = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df    = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df    = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df   = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df   = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
#include "exec/gen-icount.h"
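/*
 * Convenience wrappers for calling TCG helpers that take a trailing
 * immediate: the name encodes <results>e<TCGv arguments>i, e.g. 1e2i is one
 * result, two TCGv arguments and one immediate.  The immediate is
 * materialised as a temporary TCGv_i32 and freed again after the call.
 */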
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
    int CP0_LLAddr_shift;
    /* (further fields omitted in this excerpt) */
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
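/*
 * Usage sketch (illustrative, not upstream code): a three-operand ALU
 * translation typically copies its sources through temporaries and writes
 * back only through gen_store_gpr(), so reads of $zero yield 0 and writes to
 * it are silently dropped:
 *
 *     TCGv t0 = tcg_temp_new();
 *     TCGv t1 = tcg_temp_new();
 *     gen_load_gpr(t0, rs);
 *     gen_load_gpr(t1, rt);
 *     tcg_gen_add_tl(t0, t0, t1);
 *     gen_store_gpr(t0, rd);
 *     tcg_temp_free(t0);
 *     tcg_temp_free(t1);
 */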
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
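/*
 * Note (added for clarity): generate_exception() only emits the
 * raise_exception helper call, while generate_exception_end() additionally
 * saves PC/hflags via save_cpu_state() and marks the block finished
 * (BS_EXCP).  The latter is the one used when the exception terminates
 * translation of the current instruction, e.g.
 *
 *     MIPS_INVAL("some reserved encoding");
 *     generate_exception_end(ctx, EXCP_RI);
 */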
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
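/*
 * Example (illustrative): with FR=0 (no MIPS_HFLAG_F64) a 64-bit value lives
 * in an even/odd register pair, low word in the even register and high word
 * in the odd one.  So for reg = 5, gen_store_fpr64() above deposits bits
 * 0..31 into fpu_f64[4] and bits 32..63 into fpu_f64[5], and gen_load_fpr64()
 * reassembles the pair with tcg_gen_concat32_i64().
 */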
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
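/*
 * Worked example (illustrative): with MIPS_HFLAG_AWRAP set on a 64-bit CPU,
 * address arithmetic wraps into the 32-bit compatibility segment, e.g.
 *
 *     addr_add(ctx, 0x000000007ffffff8, 0x10)
 *
 * computes 0x0000000080000008 and the (int32_t) cast sign-extends it to
 * 0xffffffff80000008, matching what gen_op_addr_add() does at run time with
 * tcg_gen_ext32s_i64().
 */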
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by the CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the
   instruction has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
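/*
 * Usage sketch (illustrative): decoders guard ISA-specific encodings with
 * these checks before emitting any code, e.g.
 *
 *     check_insn(ctx, ISA_MIPS32R2);
 *     check_insn_opc_removed(ctx, ISA_MIPS32R6);
 *
 * Both raise EXCP_RI through generate_exception_end() when the test fails.
 */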
/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this purpose. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
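/*
 * Note (added for clarity): each FOP_CONDS() instantiation above pastes the
 * type/fmt arguments into a comparison emitter, so e.g.
 *
 *     FOP_CONDS(, 0, d, FMT_D, 64)     ->  gen_cmp_d(ctx, n, ft, fs, cc)
 *     FOP_CONDS(abs, 1, s, FMT_S, 32)  ->  gen_cmpabs_s(ctx, n, ft, fs, cc)
 *
 * and the switch on n selects one of the 16 c.cond.fmt predicates
 * (f, un, eq, ueq, olt, ult, ole, ule, sf, ngle, seq, ngl, lt, nge, le, ngt).
 */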
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
1946 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1949 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1952 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1955 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1958 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1961 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1964 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1967 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1970 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1973 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1976 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1979 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1982 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1985 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1988 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1991 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1994 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1997 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2000 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2003 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2006 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2009 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
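/*
 * Note (added for clarity): in the CONFIG_USER_ONLY variant above, SC/SCD is
 * not completed inline.  After the alignment check (EXCP_AdES on failure),
 * an address matching lladdr stores the candidate value and register number
 * into llnewval/llreg and raises EXCP_SC, leaving the user-mode exception
 * loop to finish or fail the store; on a mismatch the target register is
 * simply cleared.  The system-mode variant defers the whole operation to the
 * sc/scd helpers.
 */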
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
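/*
 * Example (illustrative): for a typical "lw $t0, 16($sp)" style access, the
 * effective address is formed from the base register and the immediate:
 *
 *     gen_base_offset_addr(ctx, t0, 29, 16);   // addr = $sp + 16
 *
 * The two special cases avoid dead work: base 0 yields just the constant
 * offset, and offset 0 is a plain register read.
 */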
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
2118 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2119 int rt
, int base
, int16_t offset
)
2123 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPUs we must perform the
           actual memory access. */
2130 t0
= tcg_temp_new();
2131 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2134 #if defined(TARGET_MIPS64)
2136 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2137 ctx
->default_tcg_memop_mask
);
2138 gen_store_gpr(t0
, rt
);
2141 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2142 ctx
->default_tcg_memop_mask
);
2143 gen_store_gpr(t0
, rt
);
2147 op_ld_lld(t0
, t0
, ctx
);
2148 gen_store_gpr(t0
, rt
);
2151 t1
= tcg_temp_new();
2152 /* Do a byte access to possibly trigger a page
2153 fault with the unaligned address. */
2154 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2155 tcg_gen_andi_tl(t1
, t0
, 7);
2156 #ifndef TARGET_WORDS_BIGENDIAN
2157 tcg_gen_xori_tl(t1
, t1
, 7);
2159 tcg_gen_shli_tl(t1
, t1
, 3);
2160 tcg_gen_andi_tl(t0
, t0
, ~7);
2161 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2162 tcg_gen_shl_tl(t0
, t0
, t1
);
2163 t2
= tcg_const_tl(-1);
2164 tcg_gen_shl_tl(t2
, t2
, t1
);
2165 gen_load_gpr(t1
, rt
);
2166 tcg_gen_andc_tl(t1
, t1
, t2
);
2168 tcg_gen_or_tl(t0
, t0
, t1
);
2170 gen_store_gpr(t0
, rt
);
2173 t1
= tcg_temp_new();
2174 /* Do a byte access to possibly trigger a page
2175 fault with the unaligned address. */
2176 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2177 tcg_gen_andi_tl(t1
, t0
, 7);
2178 #ifdef TARGET_WORDS_BIGENDIAN
2179 tcg_gen_xori_tl(t1
, t1
, 7);
2181 tcg_gen_shli_tl(t1
, t1
, 3);
2182 tcg_gen_andi_tl(t0
, t0
, ~7);
2183 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2184 tcg_gen_shr_tl(t0
, t0
, t1
);
2185 tcg_gen_xori_tl(t1
, t1
, 63);
2186 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2187 tcg_gen_shl_tl(t2
, t2
, t1
);
2188 gen_load_gpr(t1
, rt
);
2189 tcg_gen_and_tl(t1
, t1
, t2
);
2191 tcg_gen_or_tl(t0
, t0
, t1
);
2193 gen_store_gpr(t0
, rt
);
2196 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2197 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2200 gen_store_gpr(t0
, rt
);
2204 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2205 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2207 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2208 gen_store_gpr(t0
, rt
);
2211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2212 ctx
->default_tcg_memop_mask
);
2213 gen_store_gpr(t0
, rt
);
2216 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2217 ctx
->default_tcg_memop_mask
);
2218 gen_store_gpr(t0
, rt
);
2221 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2222 ctx
->default_tcg_memop_mask
);
2223 gen_store_gpr(t0
, rt
);
2226 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2227 gen_store_gpr(t0
, rt
);
2230 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2231 gen_store_gpr(t0
, rt
);
2234 t1
= tcg_temp_new();
2235 /* Do a byte access to possibly trigger a page
2236 fault with the unaligned address. */
2237 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2238 tcg_gen_andi_tl(t1
, t0
, 3);
2239 #ifndef TARGET_WORDS_BIGENDIAN
2240 tcg_gen_xori_tl(t1
, t1
, 3);
2242 tcg_gen_shli_tl(t1
, t1
, 3);
2243 tcg_gen_andi_tl(t0
, t0
, ~3);
2244 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2245 tcg_gen_shl_tl(t0
, t0
, t1
);
2246 t2
= tcg_const_tl(-1);
2247 tcg_gen_shl_tl(t2
, t2
, t1
);
2248 gen_load_gpr(t1
, rt
);
2249 tcg_gen_andc_tl(t1
, t1
, t2
);
2251 tcg_gen_or_tl(t0
, t0
, t1
);
2253 tcg_gen_ext32s_tl(t0
, t0
);
2254 gen_store_gpr(t0
, rt
);
2257 t1
= tcg_temp_new();
2258 /* Do a byte access to possibly trigger a page
2259 fault with the unaligned address. */
2260 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2261 tcg_gen_andi_tl(t1
, t0
, 3);
2262 #ifdef TARGET_WORDS_BIGENDIAN
2263 tcg_gen_xori_tl(t1
, t1
, 3);
2265 tcg_gen_shli_tl(t1
, t1
, 3);
2266 tcg_gen_andi_tl(t0
, t0
, ~3);
2267 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2268 tcg_gen_shr_tl(t0
, t0
, t1
);
2269 tcg_gen_xori_tl(t1
, t1
, 31);
2270 t2
= tcg_const_tl(0xfffffffeull
);
2271 tcg_gen_shl_tl(t2
, t2
, t1
);
2272 gen_load_gpr(t1
, rt
);
2273 tcg_gen_and_tl(t1
, t1
, t2
);
2275 tcg_gen_or_tl(t0
, t0
, t1
);
2277 tcg_gen_ext32s_tl(t0
, t0
);
2278 gen_store_gpr(t0
, rt
);
2282 op_ld_ll(t0
, t0
, ctx
);
2283 gen_store_gpr(t0
, rt
);
2290 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2291 int base
, int16_t offset
)
2293 TCGv t0
= tcg_temp_new();
2294 TCGv t1
= tcg_temp_new();
2296 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2297 gen_load_gpr(t1
, rt
);
2299 #if defined(TARGET_MIPS64)
2301 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2302 ctx
->default_tcg_memop_mask
);
2305 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2308 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2312 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2313 ctx
->default_tcg_memop_mask
);
2316 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2317 ctx
->default_tcg_memop_mask
);
2320 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2323 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2326 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2334 /* Store conditional */
2335 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2336 int base
, int16_t offset
)
2340 #ifdef CONFIG_USER_ONLY
2341 t0
= tcg_temp_local_new();
2342 t1
= tcg_temp_local_new();
2344 t0
= tcg_temp_new();
2345 t1
= tcg_temp_new();
2347 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2348 gen_load_gpr(t1
, rt
);
2350 #if defined(TARGET_MIPS64)
2353 op_st_scd(t1
, t0
, rt
, ctx
);
2358 op_st_sc(t1
, t0
, rt
, ctx
);
2365 /* Load and store */
2366 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2367 int base
, int16_t offset
)
2369 TCGv t0
= tcg_temp_new();
2371 gen_base_offset_addr(ctx
, t0
, base
, offset
);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
2377 TCGv_i32 fp0
= tcg_temp_new_i32();
2378 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2379 ctx
->default_tcg_memop_mask
);
2380 gen_store_fpr32(ctx
, fp0
, ft
);
2381 tcg_temp_free_i32(fp0
);
2386 TCGv_i32 fp0
= tcg_temp_new_i32();
2387 gen_load_fpr32(ctx
, fp0
, ft
);
2388 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2389 ctx
->default_tcg_memop_mask
);
2390 tcg_temp_free_i32(fp0
);
2395 TCGv_i64 fp0
= tcg_temp_new_i64();
2396 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2397 ctx
->default_tcg_memop_mask
);
2398 gen_store_fpr64(ctx
, fp0
, ft
);
2399 tcg_temp_free_i64(fp0
);
2404 TCGv_i64 fp0
= tcg_temp_new_i64();
2405 gen_load_fpr64(ctx
, fp0
, ft
);
2406 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2407 ctx
->default_tcg_memop_mask
);
2408 tcg_temp_free_i64(fp0
);
2412 MIPS_INVAL("flt_ldst");
2413 generate_exception_end(ctx
, EXCP_RI
);
2420 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2421 int rs
, int16_t imm
)
2423 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2424 check_cp1_enabled(ctx
);
2428 check_insn(ctx
, ISA_MIPS2
);
2431 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2434 generate_exception_err(ctx
, EXCP_CpU
, 1);
2438 /* Arithmetic with immediate operand */
2439 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2440 int rt
, int rs
, int16_t imm
)
2442 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2444 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2445 /* If no destination, treat it as a NOP.
2446 For addi, we must generate the overflow exception when needed. */
2452 TCGv t0
= tcg_temp_local_new();
2453 TCGv t1
= tcg_temp_new();
2454 TCGv t2
= tcg_temp_new();
2455 TCGLabel
*l1
= gen_new_label();
2457 gen_load_gpr(t1
, rs
);
2458 tcg_gen_addi_tl(t0
, t1
, uimm
);
2459 tcg_gen_ext32s_tl(t0
, t0
);
2461 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2462 tcg_gen_xori_tl(t2
, t0
, uimm
);
2463 tcg_gen_and_tl(t1
, t1
, t2
);
2465 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2467 /* operands of same sign, result different sign */
2468 generate_exception(ctx
, EXCP_OVERFLOW
);
2470 tcg_gen_ext32s_tl(t0
, t0
);
2471 gen_store_gpr(t0
, rt
);
2477 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2478 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2480 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2483 #if defined(TARGET_MIPS64)
2486 TCGv t0
= tcg_temp_local_new();
2487 TCGv t1
= tcg_temp_new();
2488 TCGv t2
= tcg_temp_new();
2489 TCGLabel
*l1
= gen_new_label();
2491 gen_load_gpr(t1
, rs
);
2492 tcg_gen_addi_tl(t0
, t1
, uimm
);
2494 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2495 tcg_gen_xori_tl(t2
, t0
, uimm
);
2496 tcg_gen_and_tl(t1
, t1
, t2
);
2498 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2500 /* operands of same sign, result different sign */
2501 generate_exception(ctx
, EXCP_OVERFLOW
);
2503 gen_store_gpr(t0
, rt
);
2509 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2511 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2518 /* Logic with immediate operand */
2519 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2520 int rt
, int rs
, int16_t imm
)
2525 /* If no destination, treat it as a NOP. */
2528 uimm
= (uint16_t)imm
;
2531 if (likely(rs
!= 0))
2532 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2534 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2538 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2540 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2543 if (likely(rs
!= 0))
2544 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2546 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2549 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2551 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2552 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2554 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2563 /* Set on less than with immediate operand */
2564 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2565 int rt
, int rs
, int16_t imm
)
2567 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2571 /* If no destination, treat it as a NOP. */
2574 t0
= tcg_temp_new();
2575 gen_load_gpr(t0
, rs
);
2578 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2581 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2587 /* Shifts with immediate operand */
2588 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2589 int rt
, int rs
, int16_t imm
)
2591 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2595 /* If no destination, treat it as a NOP. */
2599 t0
= tcg_temp_new();
2600 gen_load_gpr(t0
, rs
);
2603 tcg_gen_shli_tl(t0
, t0
, uimm
);
2604 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2607 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2611 tcg_gen_ext32u_tl(t0
, t0
);
2612 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2614 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2619 TCGv_i32 t1
= tcg_temp_new_i32();
2621 tcg_gen_trunc_tl_i32(t1
, t0
);
2622 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2623 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2624 tcg_temp_free_i32(t1
);
2626 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2629 #if defined(TARGET_MIPS64)
2631 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2634 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2637 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2643 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2647 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2650 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2653 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2664 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2665 int rd
, int rs
, int rt
)
2667 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2668 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2669 /* If no destination, treat it as a NOP.
2670 For add & sub, we must generate the overflow exception when needed. */
2677 TCGv t0
= tcg_temp_local_new();
2678 TCGv t1
= tcg_temp_new();
2679 TCGv t2
= tcg_temp_new();
2680 TCGLabel
*l1
= gen_new_label();
2682 gen_load_gpr(t1
, rs
);
2683 gen_load_gpr(t2
, rt
);
2684 tcg_gen_add_tl(t0
, t1
, t2
);
2685 tcg_gen_ext32s_tl(t0
, t0
);
2686 tcg_gen_xor_tl(t1
, t1
, t2
);
2687 tcg_gen_xor_tl(t2
, t0
, t2
);
2688 tcg_gen_andc_tl(t1
, t2
, t1
);
2690 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2692 /* operands of same sign, result different sign */
2693 generate_exception(ctx
, EXCP_OVERFLOW
);
2695 gen_store_gpr(t0
, rd
);
2700 if (rs
!= 0 && rt
!= 0) {
2701 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2702 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2703 } else if (rs
== 0 && rt
!= 0) {
2704 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2705 } else if (rs
!= 0 && rt
== 0) {
2706 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2708 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2713 TCGv t0
= tcg_temp_local_new();
2714 TCGv t1
= tcg_temp_new();
2715 TCGv t2
= tcg_temp_new();
2716 TCGLabel
*l1
= gen_new_label();
2718 gen_load_gpr(t1
, rs
);
2719 gen_load_gpr(t2
, rt
);
2720 tcg_gen_sub_tl(t0
, t1
, t2
);
2721 tcg_gen_ext32s_tl(t0
, t0
);
2722 tcg_gen_xor_tl(t2
, t1
, t2
);
2723 tcg_gen_xor_tl(t1
, t0
, t1
);
2724 tcg_gen_and_tl(t1
, t1
, t2
);
2726 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2728 /* operands of different sign, first operand and result different sign */
2729 generate_exception(ctx
, EXCP_OVERFLOW
);
2731 gen_store_gpr(t0
, rd
);
2736 if (rs
!= 0 && rt
!= 0) {
2737 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2738 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2739 } else if (rs
== 0 && rt
!= 0) {
2740 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2741 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2742 } else if (rs
!= 0 && rt
== 0) {
2743 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2745 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2748 #if defined(TARGET_MIPS64)
2751 TCGv t0
= tcg_temp_local_new();
2752 TCGv t1
= tcg_temp_new();
2753 TCGv t2
= tcg_temp_new();
2754 TCGLabel
*l1
= gen_new_label();
2756 gen_load_gpr(t1
, rs
);
2757 gen_load_gpr(t2
, rt
);
2758 tcg_gen_add_tl(t0
, t1
, t2
);
2759 tcg_gen_xor_tl(t1
, t1
, t2
);
2760 tcg_gen_xor_tl(t2
, t0
, t2
);
2761 tcg_gen_andc_tl(t1
, t2
, t1
);
2763 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2765 /* operands of same sign, result different sign */
2766 generate_exception(ctx
, EXCP_OVERFLOW
);
2768 gen_store_gpr(t0
, rd
);
2773 if (rs
!= 0 && rt
!= 0) {
2774 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2775 } else if (rs
== 0 && rt
!= 0) {
2776 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2777 } else if (rs
!= 0 && rt
== 0) {
2778 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2785 TCGv t0
= tcg_temp_local_new();
2786 TCGv t1
= tcg_temp_new();
2787 TCGv t2
= tcg_temp_new();
2788 TCGLabel
*l1
= gen_new_label();
2790 gen_load_gpr(t1
, rs
);
2791 gen_load_gpr(t2
, rt
);
2792 tcg_gen_sub_tl(t0
, t1
, t2
);
2793 tcg_gen_xor_tl(t2
, t1
, t2
);
2794 tcg_gen_xor_tl(t1
, t0
, t1
);
2795 tcg_gen_and_tl(t1
, t1
, t2
);
2797 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2799 /* operands of different sign, first operand and result different sign */
2800 generate_exception(ctx
, EXCP_OVERFLOW
);
2802 gen_store_gpr(t0
, rd
);
2807 if (rs
!= 0 && rt
!= 0) {
2808 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2809 } else if (rs
== 0 && rt
!= 0) {
2810 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2811 } else if (rs
!= 0 && rt
== 0) {
2812 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2814 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2819 if (likely(rs
!= 0 && rt
!= 0)) {
2820 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2821 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2823 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2829 /* Conditional move */
2830 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2831 int rd
, int rs
, int rt
)
2836 /* If no destination, treat it as a NOP. */
2840 t0
= tcg_temp_new();
2841 gen_load_gpr(t0
, rt
);
2842 t1
= tcg_const_tl(0);
2843 t2
= tcg_temp_new();
2844 gen_load_gpr(t2
, rs
);
2847 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2850 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2853 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2856 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2865 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2866 int rd
, int rs
, int rt
)
2869 /* If no destination, treat it as a NOP. */
2875 if (likely(rs
!= 0 && rt
!= 0)) {
2876 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2878 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2882 if (rs
!= 0 && rt
!= 0) {
2883 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2884 } else if (rs
== 0 && rt
!= 0) {
2885 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2886 } else if (rs
!= 0 && rt
== 0) {
2887 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2889 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2893 if (likely(rs
!= 0 && rt
!= 0)) {
2894 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2895 } else if (rs
== 0 && rt
!= 0) {
2896 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2897 } else if (rs
!= 0 && rt
== 0) {
2898 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2900 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2904 if (likely(rs
!= 0 && rt
!= 0)) {
2905 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2906 } else if (rs
== 0 && rt
!= 0) {
2907 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2908 } else if (rs
!= 0 && rt
== 0) {
2909 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2911 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2917 /* Set on lower than */
2918 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2919 int rd
, int rs
, int rt
)
2924 /* If no destination, treat it as a NOP. */
2928 t0
= tcg_temp_new();
2929 t1
= tcg_temp_new();
2930 gen_load_gpr(t0
, rs
);
2931 gen_load_gpr(t1
, rt
);
2934 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2937 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2945 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2946 int rd
, int rs
, int rt
)
    /* If no destination, treat it as a NOP. */
2956 t0
= tcg_temp_new();
2957 t1
= tcg_temp_new();
2958 gen_load_gpr(t0
, rs
);
2959 gen_load_gpr(t1
, rt
);
2962 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2963 tcg_gen_shl_tl(t0
, t1
, t0
);
2964 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2967 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2968 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2971 tcg_gen_ext32u_tl(t1
, t1
);
2972 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2973 tcg_gen_shr_tl(t0
, t1
, t0
);
2974 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2978 TCGv_i32 t2
= tcg_temp_new_i32();
2979 TCGv_i32 t3
= tcg_temp_new_i32();
2981 tcg_gen_trunc_tl_i32(t2
, t0
);
2982 tcg_gen_trunc_tl_i32(t3
, t1
);
2983 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2984 tcg_gen_rotr_i32(t2
, t3
, t2
);
2985 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2986 tcg_temp_free_i32(t2
);
2987 tcg_temp_free_i32(t3
);
2990 #if defined(TARGET_MIPS64)
2992 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2993 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3000 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3001 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3004 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3005 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3013 /* Arithmetic on HI/LO registers */
3014 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3016 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3027 #if defined(TARGET_MIPS64)
3029 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3033 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3037 #if defined(TARGET_MIPS64)
3039 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3043 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3048 #if defined(TARGET_MIPS64)
3050 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3054 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3057 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3062 #if defined(TARGET_MIPS64)
3064 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3068 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3071 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3077 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3080 TCGv t0
= tcg_const_tl(addr
);
3081 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3082 gen_store_gpr(t0
, reg
);
3086 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3092 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3095 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3096 addr
= addr_add(ctx
, pc
, offset
);
3097 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3101 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3102 addr
= addr_add(ctx
, pc
, offset
);
3103 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3105 #if defined(TARGET_MIPS64)
3108 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3109 addr
= addr_add(ctx
, pc
, offset
);
3110 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3114 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3117 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3118 addr
= addr_add(ctx
, pc
, offset
);
3119 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3124 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3125 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3126 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3129 #if defined(TARGET_MIPS64)
3130 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3131 case R6_OPC_LDPC
+ (1 << 16):
3132 case R6_OPC_LDPC
+ (2 << 16):
3133 case R6_OPC_LDPC
+ (3 << 16):
3135 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3136 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3137 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3141 MIPS_INVAL("OPC_PCREL");
3142 generate_exception_end(ctx
, EXCP_RI
);
3149 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3158 t0
= tcg_temp_new();
3159 t1
= tcg_temp_new();
3161 gen_load_gpr(t0
, rs
);
3162 gen_load_gpr(t1
, rt
);
3167 TCGv t2
= tcg_temp_new();
3168 TCGv t3
= tcg_temp_new();
3169 tcg_gen_ext32s_tl(t0
, t0
);
3170 tcg_gen_ext32s_tl(t1
, t1
);
3171 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3172 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3173 tcg_gen_and_tl(t2
, t2
, t3
);
3174 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3175 tcg_gen_or_tl(t2
, t2
, t3
);
3176 tcg_gen_movi_tl(t3
, 0);
3177 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3178 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3179 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3186 TCGv t2
= tcg_temp_new();
3187 TCGv t3
= tcg_temp_new();
3188 tcg_gen_ext32s_tl(t0
, t0
);
3189 tcg_gen_ext32s_tl(t1
, t1
);
3190 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3191 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3192 tcg_gen_and_tl(t2
, t2
, t3
);
3193 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3194 tcg_gen_or_tl(t2
, t2
, t3
);
3195 tcg_gen_movi_tl(t3
, 0);
3196 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3197 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3198 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3205 TCGv t2
= tcg_const_tl(0);
3206 TCGv t3
= tcg_const_tl(1);
3207 tcg_gen_ext32u_tl(t0
, t0
);
3208 tcg_gen_ext32u_tl(t1
, t1
);
3209 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3210 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3211 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3218 TCGv t2
= tcg_const_tl(0);
3219 TCGv t3
= tcg_const_tl(1);
3220 tcg_gen_ext32u_tl(t0
, t0
);
3221 tcg_gen_ext32u_tl(t1
, t1
);
3222 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3223 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3224 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3231 TCGv_i32 t2
= tcg_temp_new_i32();
3232 TCGv_i32 t3
= tcg_temp_new_i32();
3233 tcg_gen_trunc_tl_i32(t2
, t0
);
3234 tcg_gen_trunc_tl_i32(t3
, t1
);
3235 tcg_gen_mul_i32(t2
, t2
, t3
);
3236 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3237 tcg_temp_free_i32(t2
);
3238 tcg_temp_free_i32(t3
);
3243 TCGv_i32 t2
= tcg_temp_new_i32();
3244 TCGv_i32 t3
= tcg_temp_new_i32();
3245 tcg_gen_trunc_tl_i32(t2
, t0
);
3246 tcg_gen_trunc_tl_i32(t3
, t1
);
3247 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3248 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3249 tcg_temp_free_i32(t2
);
3250 tcg_temp_free_i32(t3
);
3255 TCGv_i32 t2
= tcg_temp_new_i32();
3256 TCGv_i32 t3
= tcg_temp_new_i32();
3257 tcg_gen_trunc_tl_i32(t2
, t0
);
3258 tcg_gen_trunc_tl_i32(t3
, t1
);
3259 tcg_gen_mul_i32(t2
, t2
, t3
);
3260 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3261 tcg_temp_free_i32(t2
);
3262 tcg_temp_free_i32(t3
);
3267 TCGv_i32 t2
= tcg_temp_new_i32();
3268 TCGv_i32 t3
= tcg_temp_new_i32();
3269 tcg_gen_trunc_tl_i32(t2
, t0
);
3270 tcg_gen_trunc_tl_i32(t3
, t1
);
3271 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3272 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3273 tcg_temp_free_i32(t2
);
3274 tcg_temp_free_i32(t3
);
3277 #if defined(TARGET_MIPS64)
3280 TCGv t2
= tcg_temp_new();
3281 TCGv t3
= tcg_temp_new();
3282 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3283 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3284 tcg_gen_and_tl(t2
, t2
, t3
);
3285 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3286 tcg_gen_or_tl(t2
, t2
, t3
);
3287 tcg_gen_movi_tl(t3
, 0);
3288 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3289 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3296 TCGv t2
= tcg_temp_new();
3297 TCGv t3
= tcg_temp_new();
3298 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3299 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3300 tcg_gen_and_tl(t2
, t2
, t3
);
3301 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3302 tcg_gen_or_tl(t2
, t2
, t3
);
3303 tcg_gen_movi_tl(t3
, 0);
3304 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3305 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3312 TCGv t2
= tcg_const_tl(0);
3313 TCGv t3
= tcg_const_tl(1);
3314 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3315 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3322 TCGv t2
= tcg_const_tl(0);
3323 TCGv t3
= tcg_const_tl(1);
3324 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3325 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3331 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3335 TCGv t2
= tcg_temp_new();
3336 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3341 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3345 TCGv t2
= tcg_temp_new();
3346 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3352 MIPS_INVAL("r6 mul/div");
3353 generate_exception_end(ctx
, EXCP_RI
);
3361 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3362 int acc
, int rs
, int rt
)
3366 t0
= tcg_temp_new();
3367 t1
= tcg_temp_new();
3369 gen_load_gpr(t0
, rs
);
3370 gen_load_gpr(t1
, rt
);
3379 TCGv t2
= tcg_temp_new();
3380 TCGv t3
= tcg_temp_new();
3381 tcg_gen_ext32s_tl(t0
, t0
);
3382 tcg_gen_ext32s_tl(t1
, t1
);
3383 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3384 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3385 tcg_gen_and_tl(t2
, t2
, t3
);
3386 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3387 tcg_gen_or_tl(t2
, t2
, t3
);
3388 tcg_gen_movi_tl(t3
, 0);
3389 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3390 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3391 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3392 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3393 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3400 TCGv t2
= tcg_const_tl(0);
3401 TCGv t3
= tcg_const_tl(1);
3402 tcg_gen_ext32u_tl(t0
, t0
);
3403 tcg_gen_ext32u_tl(t1
, t1
);
3404 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3405 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3406 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3407 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3408 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3415 TCGv_i32 t2
= tcg_temp_new_i32();
3416 TCGv_i32 t3
= tcg_temp_new_i32();
3417 tcg_gen_trunc_tl_i32(t2
, t0
);
3418 tcg_gen_trunc_tl_i32(t3
, t1
);
3419 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3420 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3421 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3422 tcg_temp_free_i32(t2
);
3423 tcg_temp_free_i32(t3
);
3428 TCGv_i32 t2
= tcg_temp_new_i32();
3429 TCGv_i32 t3
= tcg_temp_new_i32();
3430 tcg_gen_trunc_tl_i32(t2
, t0
);
3431 tcg_gen_trunc_tl_i32(t3
, t1
);
3432 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3433 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3434 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3435 tcg_temp_free_i32(t2
);
3436 tcg_temp_free_i32(t3
);
3439 #if defined(TARGET_MIPS64)
3442 TCGv t2
= tcg_temp_new();
3443 TCGv t3
= tcg_temp_new();
3444 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3445 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3446 tcg_gen_and_tl(t2
, t2
, t3
);
3447 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3448 tcg_gen_or_tl(t2
, t2
, t3
);
3449 tcg_gen_movi_tl(t3
, 0);
3450 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3451 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3452 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3459 TCGv t2
= tcg_const_tl(0);
3460 TCGv t3
= tcg_const_tl(1);
3461 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3462 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3463 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3469 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3472 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3477 TCGv_i64 t2
= tcg_temp_new_i64();
3478 TCGv_i64 t3
= tcg_temp_new_i64();
3480 tcg_gen_ext_tl_i64(t2
, t0
);
3481 tcg_gen_ext_tl_i64(t3
, t1
);
3482 tcg_gen_mul_i64(t2
, t2
, t3
);
3483 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3484 tcg_gen_add_i64(t2
, t2
, t3
);
3485 tcg_temp_free_i64(t3
);
3486 gen_move_low32(cpu_LO
[acc
], t2
);
3487 gen_move_high32(cpu_HI
[acc
], t2
);
3488 tcg_temp_free_i64(t2
);
3493 TCGv_i64 t2
= tcg_temp_new_i64();
3494 TCGv_i64 t3
= tcg_temp_new_i64();
3496 tcg_gen_ext32u_tl(t0
, t0
);
3497 tcg_gen_ext32u_tl(t1
, t1
);
3498 tcg_gen_extu_tl_i64(t2
, t0
);
3499 tcg_gen_extu_tl_i64(t3
, t1
);
3500 tcg_gen_mul_i64(t2
, t2
, t3
);
3501 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3502 tcg_gen_add_i64(t2
, t2
, t3
);
3503 tcg_temp_free_i64(t3
);
3504 gen_move_low32(cpu_LO
[acc
], t2
);
3505 gen_move_high32(cpu_HI
[acc
], t2
);
3506 tcg_temp_free_i64(t2
);
3511 TCGv_i64 t2
= tcg_temp_new_i64();
3512 TCGv_i64 t3
= tcg_temp_new_i64();
3514 tcg_gen_ext_tl_i64(t2
, t0
);
3515 tcg_gen_ext_tl_i64(t3
, t1
);
3516 tcg_gen_mul_i64(t2
, t2
, t3
);
3517 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3518 tcg_gen_sub_i64(t2
, t3
, t2
);
3519 tcg_temp_free_i64(t3
);
3520 gen_move_low32(cpu_LO
[acc
], t2
);
3521 gen_move_high32(cpu_HI
[acc
], t2
);
3522 tcg_temp_free_i64(t2
);
3527 TCGv_i64 t2
= tcg_temp_new_i64();
3528 TCGv_i64 t3
= tcg_temp_new_i64();
3530 tcg_gen_ext32u_tl(t0
, t0
);
3531 tcg_gen_ext32u_tl(t1
, t1
);
3532 tcg_gen_extu_tl_i64(t2
, t0
);
3533 tcg_gen_extu_tl_i64(t3
, t1
);
3534 tcg_gen_mul_i64(t2
, t2
, t3
);
3535 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3536 tcg_gen_sub_i64(t2
, t3
, t2
);
3537 tcg_temp_free_i64(t3
);
3538 gen_move_low32(cpu_LO
[acc
], t2
);
3539 gen_move_high32(cpu_HI
[acc
], t2
);
3540 tcg_temp_free_i64(t2
);
3544 MIPS_INVAL("mul/div");
3545 generate_exception_end(ctx
, EXCP_RI
);
3553 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3554 int rd
, int rs
, int rt
)
3556 TCGv t0
= tcg_temp_new();
3557 TCGv t1
= tcg_temp_new();
3559 gen_load_gpr(t0
, rs
);
3560 gen_load_gpr(t1
, rt
);
3563 case OPC_VR54XX_MULS
:
3564 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3566 case OPC_VR54XX_MULSU
:
3567 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3569 case OPC_VR54XX_MACC
:
3570 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3572 case OPC_VR54XX_MACCU
:
3573 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3575 case OPC_VR54XX_MSAC
:
3576 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3578 case OPC_VR54XX_MSACU
:
3579 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3581 case OPC_VR54XX_MULHI
:
3582 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3584 case OPC_VR54XX_MULHIU
:
3585 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3587 case OPC_VR54XX_MULSHI
:
3588 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3590 case OPC_VR54XX_MULSHIU
:
3591 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3593 case OPC_VR54XX_MACCHI
:
3594 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3596 case OPC_VR54XX_MACCHIU
:
3597 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3599 case OPC_VR54XX_MSACHI
:
3600 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3602 case OPC_VR54XX_MSACHIU
:
3603 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3606 MIPS_INVAL("mul vr54xx");
3607 generate_exception_end(ctx
, EXCP_RI
);
3610 gen_store_gpr(t0
, rd
);
3617 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3626 t0
= tcg_temp_new();
3627 gen_load_gpr(t0
, rs
);
3631 gen_helper_clo(cpu_gpr
[rd
], t0
);
3635 gen_helper_clz(cpu_gpr
[rd
], t0
);
3637 #if defined(TARGET_MIPS64)
3640 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3644 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3651 /* Godson integer instructions */
3652 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3653 int rd
, int rs
, int rt
)
3665 case OPC_MULTU_G_2E
:
3666 case OPC_MULTU_G_2F
:
3667 #if defined(TARGET_MIPS64)
3668 case OPC_DMULT_G_2E
:
3669 case OPC_DMULT_G_2F
:
3670 case OPC_DMULTU_G_2E
:
3671 case OPC_DMULTU_G_2F
:
3673 t0
= tcg_temp_new();
3674 t1
= tcg_temp_new();
3677 t0
= tcg_temp_local_new();
3678 t1
= tcg_temp_local_new();
3682 gen_load_gpr(t0
, rs
);
3683 gen_load_gpr(t1
, rt
);
3688 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3689 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3691 case OPC_MULTU_G_2E
:
3692 case OPC_MULTU_G_2F
:
3693 tcg_gen_ext32u_tl(t0
, t0
);
3694 tcg_gen_ext32u_tl(t1
, t1
);
3695 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3696 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3701 TCGLabel
*l1
= gen_new_label();
3702 TCGLabel
*l2
= gen_new_label();
3703 TCGLabel
*l3
= gen_new_label();
3704 tcg_gen_ext32s_tl(t0
, t0
);
3705 tcg_gen_ext32s_tl(t1
, t1
);
3706 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3707 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3710 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3711 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3712 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3715 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3716 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3723 TCGLabel
*l1
= gen_new_label();
3724 TCGLabel
*l2
= gen_new_label();
3725 tcg_gen_ext32u_tl(t0
, t0
);
3726 tcg_gen_ext32u_tl(t1
, t1
);
3727 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3728 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3731 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3732 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3739 TCGLabel
*l1
= gen_new_label();
3740 TCGLabel
*l2
= gen_new_label();
3741 TCGLabel
*l3
= gen_new_label();
3742 tcg_gen_ext32u_tl(t0
, t0
);
3743 tcg_gen_ext32u_tl(t1
, t1
);
3744 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3745 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3746 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3748 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3751 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3752 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3759 TCGLabel
*l1
= gen_new_label();
3760 TCGLabel
*l2
= gen_new_label();
3761 tcg_gen_ext32u_tl(t0
, t0
);
3762 tcg_gen_ext32u_tl(t1
, t1
);
3763 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3764 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3767 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3768 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3772 #if defined(TARGET_MIPS64)
3773 case OPC_DMULT_G_2E
:
3774 case OPC_DMULT_G_2F
:
3775 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3777 case OPC_DMULTU_G_2E
:
3778 case OPC_DMULTU_G_2F
:
3779 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3784 TCGLabel
*l1
= gen_new_label();
3785 TCGLabel
*l2
= gen_new_label();
3786 TCGLabel
*l3
= gen_new_label();
3787 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3788 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3791 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3793 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3796 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3800 case OPC_DDIVU_G_2E
:
3801 case OPC_DDIVU_G_2F
:
3803 TCGLabel
*l1
= gen_new_label();
3804 TCGLabel
*l2
= gen_new_label();
3805 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3809 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3816 TCGLabel
*l1
= gen_new_label();
3817 TCGLabel
*l2
= gen_new_label();
3818 TCGLabel
*l3
= gen_new_label();
3819 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3820 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3821 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3823 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3826 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3830 case OPC_DMODU_G_2E
:
3831 case OPC_DMODU_G_2F
:
3833 TCGLabel
*l1
= gen_new_label();
3834 TCGLabel
*l2
= gen_new_label();
3835 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3836 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3839 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3850 /* Loongson multimedia instructions */
3851 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3853 uint32_t opc
, shift_max
;
3856 opc
= MASK_LMI(ctx
->opcode
);
3862 t0
= tcg_temp_local_new_i64();
3863 t1
= tcg_temp_local_new_i64();
3866 t0
= tcg_temp_new_i64();
3867 t1
= tcg_temp_new_i64();
3871 gen_load_fpr64(ctx
, t0
, rs
);
3872 gen_load_fpr64(ctx
, t1
, rt
);
3874 #define LMI_HELPER(UP, LO) \
3875 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3876 #define LMI_HELPER_1(UP, LO) \
3877 case OPC_##UP: gen_helper_##LO(t0, t0); break
3878 #define LMI_DIRECT(UP, LO, OP) \
3879 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);
    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR_CP2, or, or);
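
    /* The deposit ops below insert the low 16 bits of t1 into one of the
       four halfword lanes of t0 (bit offsets 0/16/32/48); the following
       and/shift/extract sequence reads back the lane selected by the low
       two bits of t1. */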
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        tcg_gen_shl_i64(t0, t0, t1);

        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);

        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
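
        /* Overflow handling for the checked add/sub cases that follow: the
           xor/and(c) sequence folds the operand and result sign bits into
           t1 so that t1 is negative exactly when the operation overflowed
           (for the add case, (t2 ^ result) & ~(t1 ^ t2) has its sign bit
           set only when both operands share a sign that the result lacks);
           the TCG_COND_GE branch then skips the EXCP_OVERFLOW exception. */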
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
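
/* Traps: the always-true comparisons (a register against itself, or $zero
   against immediate 0) trap unconditionally, the always-false ones are
   treated as NOPs, and the remaining forms branch around the EXCP_TRAP
   exception using the inverted condition. */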
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        }

        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs  */
        case OPC_TLTI:  /* r0 < 0   */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0 unsigned  */
        case OPC_TNE:   /* rs != rs */
        case OPC_TNEI:  /* r0 != 0  */
            /* Never trap: treat as NOP. */

        TCGLabel *l1 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
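
/* Direct TB chaining is only safe when the branch target lies on the same
   guest page as the current TB and single-stepping is disabled; when
   single-stepping, a debug exception is raised instead so the debugger
   regains control after each instruction. */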
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_exit_tb((uintptr_t)tb + n);
    }
    if (ctx->singlestep_enabled) {
        save_cpu_state(ctx, 0);
        gen_helper_raise_exception_debug(cpu_env);
    }
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes,
                                int rs, int rt, int32_t offset,
                                int delayslot_size)
{
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
    }

    /* Load needed operands */
        /* Compare two registers */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
        gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
#else
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
#endif
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        }
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot handling.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
#endif
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        ctx->hflags |= MIPS_HFLAG_BC;
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        ctx->hflags |= MIPS_HFLAG_BL;
        MIPS_INVAL("conditional branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    }

    ctx->btarget = btgt;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

    int post_delay = insn_bytes + delayslot_size;
    int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

    tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
#endif
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
#endif
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
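
/* The WSBH-style byte swaps below are built from shift/mask pairs: the odd
   byte lanes are shifted down and the even lanes shifted up, then the two
   halves are OR-ed back together; the 64-bit variants repeat the trick at
   halfword and word granularity. */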
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        }
        {
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        }
#endif
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
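
/* LSA/DLSA compute rd = (rs << (imm2 + 1)) + rt, where imm2 carries the
   instruction's shift amount field; the 32-bit LSA form sign-extends the
   result. */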
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
{
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }
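
/* ALIGN/DALIGN: with a byte position of 0 this degenerates to a plain move
   of rt (sign-extended for the 32-bit form); otherwise rt is placed above
   rs, the pair is shifted right by 8 * (4 - bp) bits (8 * (8 - bp) for the
   64-bit variant), and the result is written back to rd. */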
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
{
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
#endif

    TCGv t1 = tcg_temp_new();
    gen_load_gpr(t1, rs);

        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_concat_tl_i64(t2, t1, t0);
        tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
        gen_move_low32(cpu_gpr[rd], t2);
        tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
        tcg_gen_shli_tl(t0, t0, 8 * bp);
        tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
        tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
#endif
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
    gen_helper_dbitswap(cpu_gpr[rd], t0);
#endif
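
/* The MFHC0/MTHC0 helpers below access the upper half of 64-bit CP0 state
   from a 32-bit guest register: the EntryLo variants shift or deposit at
   bit 30 on MIPS64 builds and at bit 32 otherwise, while the generic
   load64/store64 variants always work on the upper 32 bits. */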
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

        gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
        goto cp0_unimplemented;
        gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
        goto cp0_unimplemented;
        gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                         ctx->CP0_LLAddr_shift);
        CP0_CHECK(ctx->mrp);
        gen_helper_mfhc0_maar(arg, cpu_env);
        goto cp0_unimplemented;
        gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
        goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    return;

cp0_unimplemented:
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
}
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
        goto cp0_unimplemented;
        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
        goto cp0_unimplemented;
        /* LLAddr is read-only (the only exception is bit 0 if LLB is
           supported); the CP0_LLAddr_rw_bitmask does not seem to be
           relevant for modern MIPS cores supporting MTHC0, therefore
           treating MTHC0 to LLAddr as NOP. */
        CP0_CHECK(ctx->mrp);
        gen_helper_mthc0_maar(cpu_env, arg);
        goto cp0_unimplemented;
        tcg_gen_andi_tl(arg, arg, mask);
        gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
        goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
cp0_unimplemented:
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
}
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
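
/* The gen_mfc0/gen_mtc0 functions below (and their 64-bit dmfc0/dmtc0
   counterparts) dispatch on the (reg, sel) pair: simple registers are read
   or written directly via gen_mfc0_load32/gen_mtc0_store32 and friends,
   registers with side effects go through helpers, and writes that can
   change hflags or unmask interrupts end the translation block
   (BS_STOP / BS_EXCP) so the new state takes effect immediately. */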
4935 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4937 const char *rn
= "invalid";
4940 check_insn(ctx
, ISA_MIPS32
);
4946 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4950 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4951 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4955 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4956 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4960 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4961 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4970 goto cp0_unimplemented
;
4976 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4977 gen_helper_mfc0_random(arg
, cpu_env
);
4981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4982 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4986 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4987 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4992 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4997 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5002 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5007 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5008 rn
= "VPEScheFBack";
5011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5012 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5016 goto cp0_unimplemented
;
5023 TCGv_i64 tmp
= tcg_temp_new_i64();
5024 tcg_gen_ld_i64(tmp
, cpu_env
,
5025 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5026 #if defined(TARGET_MIPS64)
5028 /* Move RI/XI fields to bits 31:30 */
5029 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5030 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5033 gen_move_low32(arg
, tmp
);
5034 tcg_temp_free_i64(tmp
);
5039 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5040 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5044 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5045 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5049 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5050 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5054 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5055 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5059 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5060 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5064 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5065 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5069 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5070 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5074 goto cp0_unimplemented
;
5081 TCGv_i64 tmp
= tcg_temp_new_i64();
5082 tcg_gen_ld_i64(tmp
, cpu_env
,
5083 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5084 #if defined(TARGET_MIPS64)
5086 /* Move RI/XI fields to bits 31:30 */
5087 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5088 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5091 gen_move_low32(arg
, tmp
);
5092 tcg_temp_free_i64(tmp
);
5098 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5099 rn
= "GlobalNumber";
5102 goto cp0_unimplemented
;
5108 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5109 tcg_gen_ext32s_tl(arg
, arg
);
5113 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5114 rn
= "ContextConfig";
5115 goto cp0_unimplemented
;
5118 CP0_CHECK(ctx
->ulri
);
5119 tcg_gen_ld32s_tl(arg
, cpu_env
,
5120 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5124 goto cp0_unimplemented
;
5130 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5134 check_insn(ctx
, ISA_MIPS32R2
);
5135 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5139 goto cp0_unimplemented
;
5145 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5149 check_insn(ctx
, ISA_MIPS32R2
);
5150 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5154 check_insn(ctx
, ISA_MIPS32R2
);
5155 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5159 check_insn(ctx
, ISA_MIPS32R2
);
5160 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5164 check_insn(ctx
, ISA_MIPS32R2
);
5165 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5169 check_insn(ctx
, ISA_MIPS32R2
);
5170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5174 goto cp0_unimplemented
;
5180 check_insn(ctx
, ISA_MIPS32R2
);
5181 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5185 goto cp0_unimplemented
;
5191 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5192 tcg_gen_ext32s_tl(arg
, arg
);
5197 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5202 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5206 goto cp0_unimplemented
;
5212 /* Mark as an IO operation because we read the time. */
5213 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5216 gen_helper_mfc0_count(arg
, cpu_env
);
5217 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5220 /* Break the TB to be able to take timer interrupts immediately
5221 after reading count. */
5222 ctx
->bstate
= BS_STOP
;
5225 /* 6,7 are implementation dependent */
5227 goto cp0_unimplemented
;
5233 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5234 tcg_gen_ext32s_tl(arg
, arg
);
5238 goto cp0_unimplemented
;
5244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5247 /* 6,7 are implementation dependent */
5249 goto cp0_unimplemented
;
5255 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5259 check_insn(ctx
, ISA_MIPS32R2
);
5260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5264 check_insn(ctx
, ISA_MIPS32R2
);
5265 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5269 check_insn(ctx
, ISA_MIPS32R2
);
5270 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5274 goto cp0_unimplemented
;
5280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5284 goto cp0_unimplemented
;
5290 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5291 tcg_gen_ext32s_tl(arg
, arg
);
5295 goto cp0_unimplemented
;
5301 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5305 check_insn(ctx
, ISA_MIPS32R2
);
5306 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5310 check_insn(ctx
, ISA_MIPS32R2
);
5311 CP0_CHECK(ctx
->cmgcr
);
5312 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5313 tcg_gen_ext32s_tl(arg
, arg
);
5317 goto cp0_unimplemented
;
5323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5327 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5331 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5335 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5339 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5343 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5346 /* 6,7 are implementation dependent */
5348 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5356 goto cp0_unimplemented
;
5362 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5366 CP0_CHECK(ctx
->mrp
);
5367 gen_helper_mfc0_maar(arg
, cpu_env
);
5371 CP0_CHECK(ctx
->mrp
);
5372 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5376 goto cp0_unimplemented
;
5382 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5386 goto cp0_unimplemented
;
5392 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5396 goto cp0_unimplemented
;
5402 #if defined(TARGET_MIPS64)
5403 check_insn(ctx
, ISA_MIPS3
);
5404 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5405 tcg_gen_ext32s_tl(arg
, arg
);
5410 goto cp0_unimplemented
;
5414 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5415 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5418 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5422 goto cp0_unimplemented
;
5426 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5427 rn
= "'Diagnostic"; /* implementation dependent */
5432 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5436 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5437 rn
= "TraceControl";
5440 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5441 rn
= "TraceControl2";
5444 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5445 rn
= "UserTraceData";
5448 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5452 goto cp0_unimplemented
;
5459 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5460 tcg_gen_ext32s_tl(arg
, arg
);
5464 goto cp0_unimplemented
;
5470 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5471 rn
= "Performance0";
5474 // gen_helper_mfc0_performance1(arg);
5475 rn
= "Performance1";
5478 // gen_helper_mfc0_performance2(arg);
5479 rn
= "Performance2";
5482 // gen_helper_mfc0_performance3(arg);
5483 rn
= "Performance3";
5486 // gen_helper_mfc0_performance4(arg);
5487 rn
= "Performance4";
5490 // gen_helper_mfc0_performance5(arg);
5491 rn
= "Performance5";
5494 // gen_helper_mfc0_performance6(arg);
5495 rn
= "Performance6";
5498 // gen_helper_mfc0_performance7(arg);
5499 rn
= "Performance7";
5502 goto cp0_unimplemented
;
5508 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5512 goto cp0_unimplemented
;
5518 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5522 goto cp0_unimplemented
;
5532 TCGv_i64 tmp
= tcg_temp_new_i64();
5533 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5534 gen_move_low32(arg
, tmp
);
5535 tcg_temp_free_i64(tmp
);
5543 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5547 goto cp0_unimplemented
;
5556 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5563 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5567 goto cp0_unimplemented
;
5573 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5574 tcg_gen_ext32s_tl(arg
, arg
);
5578 goto cp0_unimplemented
;
5585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5589 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5590 tcg_gen_ld_tl(arg
, cpu_env
,
5591 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5592 tcg_gen_ext32s_tl(arg
, arg
);
5596 goto cp0_unimplemented
;
5600 goto cp0_unimplemented
;
5602 (void)rn
; /* avoid a compiler warning */
5603 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5607 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5608 gen_mfc0_unimplemented(ctx
, arg
);
5611 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5613 const char *rn
= "invalid";
5616 check_insn(ctx
, ISA_MIPS32
);
5618 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5626 gen_helper_mtc0_index(cpu_env
, arg
);
5630 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5631 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5635 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5640 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5650 goto cp0_unimplemented
;
5660 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5661 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5665 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5666 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5670 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5671 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5675 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5676 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5680 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5681 tcg_gen_st_tl(arg
, cpu_env
,
5682 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5687 tcg_gen_st_tl(arg
, cpu_env
,
5688 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5689 rn
= "VPEScheFBack";
5692 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5693 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5697 goto cp0_unimplemented
;
5703 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5707 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5708 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5712 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5713 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5717 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5718 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5722 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5723 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5727 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5728 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5732 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5733 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5737 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5738 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5742 goto cp0_unimplemented
;
5748 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5754 rn
= "GlobalNumber";
5757 goto cp0_unimplemented
;
5763 gen_helper_mtc0_context(cpu_env
, arg
);
5767 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5768 rn
= "ContextConfig";
5769 goto cp0_unimplemented
;
5772 CP0_CHECK(ctx
->ulri
);
5773 tcg_gen_st_tl(arg
, cpu_env
,
5774 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5778 goto cp0_unimplemented
;
5784 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5788 check_insn(ctx
, ISA_MIPS32R2
);
5789 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5791 ctx
->bstate
= BS_STOP
;
5794 goto cp0_unimplemented
;
5800 gen_helper_mtc0_wired(cpu_env
, arg
);
5804 check_insn(ctx
, ISA_MIPS32R2
);
5805 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5809 check_insn(ctx
, ISA_MIPS32R2
);
5810 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5814 check_insn(ctx
, ISA_MIPS32R2
);
5815 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5819 check_insn(ctx
, ISA_MIPS32R2
);
5820 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5824 check_insn(ctx
, ISA_MIPS32R2
);
5825 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5829 goto cp0_unimplemented
;
5835 check_insn(ctx
, ISA_MIPS32R2
);
5836 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5837 ctx
->bstate
= BS_STOP
;
5841 goto cp0_unimplemented
;
5859 goto cp0_unimplemented
;
5865 gen_helper_mtc0_count(cpu_env
, arg
);
5868 /* 6,7 are implementation dependent */
5870 goto cp0_unimplemented
;
5876 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5880 goto cp0_unimplemented
;
5886 gen_helper_mtc0_compare(cpu_env
, arg
);
5889 /* 6,7 are implementation dependent */
5891 goto cp0_unimplemented
;
5897 save_cpu_state(ctx
, 1);
5898 gen_helper_mtc0_status(cpu_env
, arg
);
5899 /* BS_STOP isn't good enough here, hflags may have changed. */
5900 gen_save_pc(ctx
->pc
+ 4);
5901 ctx
->bstate
= BS_EXCP
;
5905 check_insn(ctx
, ISA_MIPS32R2
);
5906 gen_helper_mtc0_intctl(cpu_env
, arg
);
5907 /* Stop translation as we may have switched the execution mode */
5908 ctx
->bstate
= BS_STOP
;
5912 check_insn(ctx
, ISA_MIPS32R2
);
5913 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5914 /* Stop translation as we may have switched the execution mode */
5915 ctx
->bstate
= BS_STOP
;
5919 check_insn(ctx
, ISA_MIPS32R2
);
5920 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5921 /* Stop translation as we may have switched the execution mode */
5922 ctx
->bstate
= BS_STOP
;
5926 goto cp0_unimplemented
;
5932 save_cpu_state(ctx
, 1);
5933 gen_helper_mtc0_cause(cpu_env
, arg
);
5937 goto cp0_unimplemented
;
5943 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5947 goto cp0_unimplemented
;
5957 check_insn(ctx
, ISA_MIPS32R2
);
5958 gen_helper_mtc0_ebase(cpu_env
, arg
);
5962 goto cp0_unimplemented
;
5968 gen_helper_mtc0_config0(cpu_env
, arg
);
5970 /* Stop translation as we may have switched the execution mode */
5971 ctx
->bstate
= BS_STOP
;
5974 /* ignored, read only */
5978 gen_helper_mtc0_config2(cpu_env
, arg
);
5980 /* Stop translation as we may have switched the execution mode */
5981 ctx
->bstate
= BS_STOP
;
5984 gen_helper_mtc0_config3(cpu_env
, arg
);
5986 /* Stop translation as we may have switched the execution mode */
5987 ctx
->bstate
= BS_STOP
;
5990 gen_helper_mtc0_config4(cpu_env
, arg
);
5992 ctx
->bstate
= BS_STOP
;
5995 gen_helper_mtc0_config5(cpu_env
, arg
);
5997 /* Stop translation as we may have switched the execution mode */
5998 ctx
->bstate
= BS_STOP
;
6000 /* 6,7 are implementation dependent */
6010 rn
= "Invalid config selector";
6011 goto cp0_unimplemented
;
6017 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6021 CP0_CHECK(ctx
->mrp
);
6022 gen_helper_mtc0_maar(cpu_env
, arg
);
6026 CP0_CHECK(ctx
->mrp
);
6027 gen_helper_mtc0_maari(cpu_env
, arg
);
6031 goto cp0_unimplemented
;
6037 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6041 goto cp0_unimplemented
;
6047 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6051 goto cp0_unimplemented
;
6057 #if defined(TARGET_MIPS64)
6058 check_insn(ctx
, ISA_MIPS3
);
6059 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6064 goto cp0_unimplemented
;
6068 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6069 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6072 gen_helper_mtc0_framemask(cpu_env
, arg
);
6076 goto cp0_unimplemented
;
6081 rn
= "Diagnostic"; /* implementation dependent */
6086 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6087 /* BS_STOP isn't good enough here, hflags may have changed. */
6088 gen_save_pc(ctx
->pc
+ 4);
6089 ctx
->bstate
= BS_EXCP
;
6093 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6094 rn
= "TraceControl";
6095 /* Stop translation as we may have switched the execution mode */
6096 ctx
->bstate
= BS_STOP
;
6099 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6100 rn
= "TraceControl2";
6101 /* Stop translation as we may have switched the execution mode */
6102 ctx
->bstate
= BS_STOP
;
6105 /* Stop translation as we may have switched the execution mode */
6106 ctx
->bstate
= BS_STOP
;
6107 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6108 rn
= "UserTraceData";
6109 /* Stop translation as we may have switched the execution mode */
6110 ctx
->bstate
= BS_STOP
;
6113 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6114 /* Stop translation as we may have switched the execution mode */
6115 ctx
->bstate
= BS_STOP
;
6119 goto cp0_unimplemented
;
6126 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6130 goto cp0_unimplemented
;
6136 gen_helper_mtc0_performance0(cpu_env
, arg
);
6137 rn
= "Performance0";
6140 // gen_helper_mtc0_performance1(arg);
6141 rn
= "Performance1";
6144 // gen_helper_mtc0_performance2(arg);
6145 rn
= "Performance2";
6148 // gen_helper_mtc0_performance3(arg);
6149 rn
= "Performance3";
6152 // gen_helper_mtc0_performance4(arg);
6153 rn
= "Performance4";
6156 // gen_helper_mtc0_performance5(arg);
6157 rn
= "Performance5";
6160 // gen_helper_mtc0_performance6(arg);
6161 rn
= "Performance6";
6164 // gen_helper_mtc0_performance7(arg);
6165 rn
= "Performance7";
6168 goto cp0_unimplemented
;
6174 gen_helper_mtc0_errctl(cpu_env
, arg
);
6175 ctx
->bstate
= BS_STOP
;
6179 goto cp0_unimplemented
;
6189 goto cp0_unimplemented
;
6198 gen_helper_mtc0_taglo(cpu_env
, arg
);
6205 gen_helper_mtc0_datalo(cpu_env
, arg
);
6209 goto cp0_unimplemented
;
6218 gen_helper_mtc0_taghi(cpu_env
, arg
);
6225 gen_helper_mtc0_datahi(cpu_env
, arg
);
6230 goto cp0_unimplemented
;
6236 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6240 goto cp0_unimplemented
;
6247 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6251 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6252 tcg_gen_st_tl(arg
, cpu_env
,
6253 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6257 goto cp0_unimplemented
;
6259 /* Stop translation as we may have switched the execution mode */
6260 ctx
->bstate
= BS_STOP
;
6263 goto cp0_unimplemented
;
6265 (void)rn
; /* avoid a compiler warning */
6266 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6267 /* For simplicity assume that all writes can cause interrupts. */
6268 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6270 ctx
->bstate
= BS_STOP
;
6275 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6278 #if defined(TARGET_MIPS64)
6279 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6281 const char *rn
= "invalid";
6284 check_insn(ctx
, ISA_MIPS64
);
6290 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6294 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6295 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6299 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6300 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6304 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6305 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6314 goto cp0_unimplemented
;
6320 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6321 gen_helper_mfc0_random(arg
, cpu_env
);
6325 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6326 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6330 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6331 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6335 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6340 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6341 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6345 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6346 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6350 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6351 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6352 rn
= "VPEScheFBack";
6355 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6360 goto cp0_unimplemented
;
6366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6370 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6371 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6375 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6376 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6380 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6381 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6385 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6386 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6391 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6395 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6396 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6400 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6401 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6405 goto cp0_unimplemented
;
6411 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6416 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6417 rn
= "GlobalNumber";
6420 goto cp0_unimplemented
;
6426 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6430 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6431 rn
= "ContextConfig";
6432 goto cp0_unimplemented
;
6435 CP0_CHECK(ctx
->ulri
);
6436 tcg_gen_ld_tl(arg
, cpu_env
,
6437 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6441 goto cp0_unimplemented
;
6447 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6451 check_insn(ctx
, ISA_MIPS32R2
);
6452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6456 goto cp0_unimplemented
;
6462 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6466 check_insn(ctx
, ISA_MIPS32R2
);
6467 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6471 check_insn(ctx
, ISA_MIPS32R2
);
6472 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6476 check_insn(ctx
, ISA_MIPS32R2
);
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6481 check_insn(ctx
, ISA_MIPS32R2
);
6482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6486 check_insn(ctx
, ISA_MIPS32R2
);
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6491 goto cp0_unimplemented
;
6497 check_insn(ctx
, ISA_MIPS32R2
);
6498 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6502 goto cp0_unimplemented
;
6508 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6513 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6518 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6522 goto cp0_unimplemented
;
6528 /* Mark as an IO operation because we read the time. */
6529 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6532 gen_helper_mfc0_count(arg
, cpu_env
);
6533 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6536 /* Break the TB to be able to take timer interrupts immediately
6537 after reading count. */
6538 ctx
->bstate
= BS_STOP
;
6541 /* 6,7 are implementation dependent */
6543 goto cp0_unimplemented
;
6549 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6553 goto cp0_unimplemented
;
6559 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6562 /* 6,7 are implementation dependent */
6564 goto cp0_unimplemented
;
6570 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6574 check_insn(ctx
, ISA_MIPS32R2
);
6575 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6579 check_insn(ctx
, ISA_MIPS32R2
);
6580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6584 check_insn(ctx
, ISA_MIPS32R2
);
6585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6589 goto cp0_unimplemented
;
6595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6599 goto cp0_unimplemented
;
6605 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6609 goto cp0_unimplemented
;
6615 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6619 check_insn(ctx
, ISA_MIPS32R2
);
6620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6624 check_insn(ctx
, ISA_MIPS32R2
);
6625 CP0_CHECK(ctx
->cmgcr
);
6626 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6630 goto cp0_unimplemented
;
6636 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6644 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6648 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6652 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6656 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6659 /* 6,7 are implementation dependent */
6661 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6669 goto cp0_unimplemented
;
6675 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6679 CP0_CHECK(ctx
->mrp
);
6680 gen_helper_dmfc0_maar(arg
, cpu_env
);
6684 CP0_CHECK(ctx
->mrp
);
6685 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6689 goto cp0_unimplemented
;
6695 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6699 goto cp0_unimplemented
;
6705 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6709 goto cp0_unimplemented
;
6715 check_insn(ctx
, ISA_MIPS3
);
6716 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6720 goto cp0_unimplemented
;
6724 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6725 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6728 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6732 goto cp0_unimplemented
;
6736 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6737 rn
= "'Diagnostic"; /* implementation dependent */
6742 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6746 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6747 rn
= "TraceControl";
6750 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6751 rn
= "TraceControl2";
6754 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6755 rn
= "UserTraceData";
6758 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6762 goto cp0_unimplemented
;
6769 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6773 goto cp0_unimplemented
;
6779 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6780 rn
= "Performance0";
6783 // gen_helper_dmfc0_performance1(arg);
6784 rn
= "Performance1";
6787 // gen_helper_dmfc0_performance2(arg);
6788 rn
= "Performance2";
6791 // gen_helper_dmfc0_performance3(arg);
6792 rn
= "Performance3";
6795 // gen_helper_dmfc0_performance4(arg);
6796 rn
= "Performance4";
6799 // gen_helper_dmfc0_performance5(arg);
6800 rn
= "Performance5";
6803 // gen_helper_dmfc0_performance6(arg);
6804 rn
= "Performance6";
6807 // gen_helper_dmfc0_performance7(arg);
6808 rn
= "Performance7";
6811 goto cp0_unimplemented
;
6817 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6821 goto cp0_unimplemented
;
6828 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6832 goto cp0_unimplemented
;
6841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6848 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6852 goto cp0_unimplemented
;
6861 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6868 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6872 goto cp0_unimplemented
;
6878 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6882 goto cp0_unimplemented
;
6889 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6893 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6894 tcg_gen_ld_tl(arg
, cpu_env
,
6895 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6899 goto cp0_unimplemented
;
6903 goto cp0_unimplemented
;
6905 (void)rn
; /* avoid a compiler warning */
6906 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6910 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6911 gen_mfc0_unimplemented(ctx
, arg
);
6914 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6916 const char *rn
= "invalid";
6919 check_insn(ctx
, ISA_MIPS64
);
6921 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6929 gen_helper_mtc0_index(cpu_env
, arg
);
6933 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6934 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6938 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6953 goto cp0_unimplemented
;
6963 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6964 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6968 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6969 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6974 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6979 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6984 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6988 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6989 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6990 rn
= "VPEScheFBack";
6993 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6994 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6998 goto cp0_unimplemented
;
7004 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7008 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7009 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7013 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7014 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7018 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7019 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7024 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7028 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7029 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7033 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7034 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7038 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7039 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7043 goto cp0_unimplemented
;
7049 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7055 rn
= "GlobalNumber";
7058 goto cp0_unimplemented
;
7064 gen_helper_mtc0_context(cpu_env
, arg
);
7068 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7069 rn
= "ContextConfig";
7070 goto cp0_unimplemented
;
7073 CP0_CHECK(ctx
->ulri
);
7074 tcg_gen_st_tl(arg
, cpu_env
,
7075 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7079 goto cp0_unimplemented
;
7085 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7089 check_insn(ctx
, ISA_MIPS32R2
);
7090 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7094 goto cp0_unimplemented
;
7100 gen_helper_mtc0_wired(cpu_env
, arg
);
7104 check_insn(ctx
, ISA_MIPS32R2
);
7105 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7109 check_insn(ctx
, ISA_MIPS32R2
);
7110 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7114 check_insn(ctx
, ISA_MIPS32R2
);
7115 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7119 check_insn(ctx
, ISA_MIPS32R2
);
7120 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7124 check_insn(ctx
, ISA_MIPS32R2
);
7125 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7129 goto cp0_unimplemented
;
7135 check_insn(ctx
, ISA_MIPS32R2
);
7136 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7137 ctx
->bstate
= BS_STOP
;
7141 goto cp0_unimplemented
;
7159 goto cp0_unimplemented
;
7165 gen_helper_mtc0_count(cpu_env
, arg
);
7168 /* 6,7 are implementation dependent */
7170 goto cp0_unimplemented
;
7172 /* Stop translation as we may have switched the execution mode */
7173 ctx
->bstate
= BS_STOP
;
7178 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7182 goto cp0_unimplemented
;
7188 gen_helper_mtc0_compare(cpu_env
, arg
);
7191 /* 6,7 are implementation dependent */
7193 goto cp0_unimplemented
;
7195 /* Stop translation as we may have switched the execution mode */
7196 ctx
->bstate
= BS_STOP
;
7201 save_cpu_state(ctx
, 1);
7202 gen_helper_mtc0_status(cpu_env
, arg
);
7203 /* BS_STOP isn't good enough here, hflags may have changed. */
7204 gen_save_pc(ctx
->pc
+ 4);
7205 ctx
->bstate
= BS_EXCP
;
7209 check_insn(ctx
, ISA_MIPS32R2
);
7210 gen_helper_mtc0_intctl(cpu_env
, arg
);
7211 /* Stop translation as we may have switched the execution mode */
7212 ctx
->bstate
= BS_STOP
;
7216 check_insn(ctx
, ISA_MIPS32R2
);
7217 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7218 /* Stop translation as we may have switched the execution mode */
7219 ctx
->bstate
= BS_STOP
;
7223 check_insn(ctx
, ISA_MIPS32R2
);
7224 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7225 /* Stop translation as we may have switched the execution mode */
7226 ctx
->bstate
= BS_STOP
;
7230 goto cp0_unimplemented
;
7236 save_cpu_state(ctx
, 1);
7237 /* Mark as an IO operation because we may trigger a software
7239 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7242 gen_helper_mtc0_cause(cpu_env
, arg
);
7243 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7246 /* Stop translation as we may have triggered an intetrupt */
7247 ctx
->bstate
= BS_STOP
;
7251 goto cp0_unimplemented
;
7257 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7261 goto cp0_unimplemented
;
7271 check_insn(ctx
, ISA_MIPS32R2
);
7272 gen_helper_mtc0_ebase(cpu_env
, arg
);
7276 goto cp0_unimplemented
;
7282 gen_helper_mtc0_config0(cpu_env
, arg
);
7284 /* Stop translation as we may have switched the execution mode */
7285 ctx
->bstate
= BS_STOP
;
7288 /* ignored, read only */
7292 gen_helper_mtc0_config2(cpu_env
, arg
);
7294 /* Stop translation as we may have switched the execution mode */
7295 ctx
->bstate
= BS_STOP
;
7298 gen_helper_mtc0_config3(cpu_env
, arg
);
7300 /* Stop translation as we may have switched the execution mode */
7301 ctx
->bstate
= BS_STOP
;
7304 /* currently ignored */
7308 gen_helper_mtc0_config5(cpu_env
, arg
);
7310 /* Stop translation as we may have switched the execution mode */
7311 ctx
->bstate
= BS_STOP
;
7313 /* 6,7 are implementation dependent */
7315 rn
= "Invalid config selector";
7316 goto cp0_unimplemented
;
7322 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7326 CP0_CHECK(ctx
->mrp
);
7327 gen_helper_mtc0_maar(cpu_env
, arg
);
7331 CP0_CHECK(ctx
->mrp
);
7332 gen_helper_mtc0_maari(cpu_env
, arg
);
7336 goto cp0_unimplemented
;
7342 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7346 goto cp0_unimplemented
;
7352 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7356 goto cp0_unimplemented
;
7362 check_insn(ctx
, ISA_MIPS3
);
7363 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7367 goto cp0_unimplemented
;
7371 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7372 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7375 gen_helper_mtc0_framemask(cpu_env
, arg
);
7379 goto cp0_unimplemented
;
7384 rn
= "Diagnostic"; /* implementation dependent */
7389 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7390 /* BS_STOP isn't good enough here, hflags may have changed. */
7391 gen_save_pc(ctx
->pc
+ 4);
7392 ctx
->bstate
= BS_EXCP
;
7396 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7397 /* Stop translation as we may have switched the execution mode */
7398 ctx
->bstate
= BS_STOP
;
7399 rn
= "TraceControl";
7402 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7403 /* Stop translation as we may have switched the execution mode */
7404 ctx
->bstate
= BS_STOP
;
7405 rn
= "TraceControl2";
7408 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7409 /* Stop translation as we may have switched the execution mode */
7410 ctx
->bstate
= BS_STOP
;
7411 rn
= "UserTraceData";
7414 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7415 /* Stop translation as we may have switched the execution mode */
7416 ctx
->bstate
= BS_STOP
;
7420 goto cp0_unimplemented
;
7427 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7431 goto cp0_unimplemented
;
7437 gen_helper_mtc0_performance0(cpu_env
, arg
);
7438 rn
= "Performance0";
7441 // gen_helper_mtc0_performance1(cpu_env, arg);
7442 rn
= "Performance1";
7445 // gen_helper_mtc0_performance2(cpu_env, arg);
7446 rn
= "Performance2";
7449 // gen_helper_mtc0_performance3(cpu_env, arg);
7450 rn
= "Performance3";
7453 // gen_helper_mtc0_performance4(cpu_env, arg);
7454 rn
= "Performance4";
7457 // gen_helper_mtc0_performance5(cpu_env, arg);
7458 rn
= "Performance5";
7461 // gen_helper_mtc0_performance6(cpu_env, arg);
7462 rn
= "Performance6";
7465 // gen_helper_mtc0_performance7(cpu_env, arg);
7466 rn
= "Performance7";
7469 goto cp0_unimplemented
;
7475 gen_helper_mtc0_errctl(cpu_env
, arg
);
7476 ctx
->bstate
= BS_STOP
;
7480 goto cp0_unimplemented
;
7490 goto cp0_unimplemented
;
7499 gen_helper_mtc0_taglo(cpu_env
, arg
);
7506 gen_helper_mtc0_datalo(cpu_env
, arg
);
7510 goto cp0_unimplemented
;
7519 gen_helper_mtc0_taghi(cpu_env
, arg
);
7526 gen_helper_mtc0_datahi(cpu_env
, arg
);
7531 goto cp0_unimplemented
;
7537 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7541 goto cp0_unimplemented
;
7548 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7552 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7553 tcg_gen_st_tl(arg
, cpu_env
,
7554 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7558 goto cp0_unimplemented
;
7560 /* Stop translation as we may have switched the execution mode */
7561 ctx
->bstate
= BS_STOP
;
7564 goto cp0_unimplemented
;
7566 (void)rn
; /* avoid a compiler warning */
7567 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7568 /* For simplicity assume that all writes can cause interrupts. */
7569 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7571 ctx
->bstate
= BS_STOP
;
7576 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7578 #endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
        ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))
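/*
 * Worked example of the FOP() encoding, assuming the standard MIPS fmt
 * field values (FMT_S = 16, FMT_D = 17, FMT_W = 20, FMT_L = 21, FMT_PS = 22):
 * FOP(2, FMT_S), i.e. OPC_MUL_S below, packs fmt into bits 25..21 and the
 * function code into bits 5..0, giving (16 << 21) | 2 == 0x02000002.
 */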
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        TCGv_i32 fs_tmp = tcg_const_i32(fs);
        gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
        tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
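
/*
 * Reading aid for the R6 select patterns generated by gen_sel_s()/gen_sel_d()
 * above (with t1 == 0, inferring the case labels from the SEL/SELEQZ/SELNEZ
 * opcodes they handle):
 *   SEL.fmt:    fd = (fd & 1) ? ft : fs
 *   SELEQZ.fmt: fd = ((ft & 1) == 0) ? fs : 0
 *   SELNEZ.fmt: fd = ((ft & 1) != 0) ? fs : 0
 * each realised with a single tcg_gen_movcond_i32/_i64 on the low bit.
 */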
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
8817 TCGv_i32 fp0
= tcg_temp_new_i32();
8818 TCGv_i32 fp1
= tcg_temp_new_i32();
8820 gen_load_fpr32(ctx
, fp0
, fs
);
8821 gen_load_fpr32(ctx
, fp1
, ft
);
8822 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8823 tcg_temp_free_i32(fp1
);
8824 gen_store_fpr32(ctx
, fp0
, fd
);
8825 tcg_temp_free_i32(fp0
);
8830 TCGv_i32 fp0
= tcg_temp_new_i32();
8831 TCGv_i32 fp1
= tcg_temp_new_i32();
8833 gen_load_fpr32(ctx
, fp0
, fs
);
8834 gen_load_fpr32(ctx
, fp1
, ft
);
8835 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8836 tcg_temp_free_i32(fp1
);
8837 gen_store_fpr32(ctx
, fp0
, fd
);
8838 tcg_temp_free_i32(fp0
);
8843 TCGv_i32 fp0
= tcg_temp_new_i32();
8844 TCGv_i32 fp1
= tcg_temp_new_i32();
8846 gen_load_fpr32(ctx
, fp0
, fs
);
8847 gen_load_fpr32(ctx
, fp1
, ft
);
8848 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8849 tcg_temp_free_i32(fp1
);
8850 gen_store_fpr32(ctx
, fp0
, fd
);
8851 tcg_temp_free_i32(fp0
);
8856 TCGv_i32 fp0
= tcg_temp_new_i32();
8857 TCGv_i32 fp1
= tcg_temp_new_i32();
8859 gen_load_fpr32(ctx
, fp0
, fs
);
8860 gen_load_fpr32(ctx
, fp1
, ft
);
8861 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8862 tcg_temp_free_i32(fp1
);
8863 gen_store_fpr32(ctx
, fp0
, fd
);
8864 tcg_temp_free_i32(fp0
);
8869 TCGv_i32 fp0
= tcg_temp_new_i32();
8871 gen_load_fpr32(ctx
, fp0
, fs
);
8872 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8873 gen_store_fpr32(ctx
, fp0
, fd
);
8874 tcg_temp_free_i32(fp0
);
8879 TCGv_i32 fp0
= tcg_temp_new_i32();
8881 gen_load_fpr32(ctx
, fp0
, fs
);
8882 gen_helper_float_abs_s(fp0
, fp0
);
8883 gen_store_fpr32(ctx
, fp0
, fd
);
8884 tcg_temp_free_i32(fp0
);
8889 TCGv_i32 fp0
= tcg_temp_new_i32();
8891 gen_load_fpr32(ctx
, fp0
, fs
);
8892 gen_store_fpr32(ctx
, fp0
, fd
);
8893 tcg_temp_free_i32(fp0
);
8898 TCGv_i32 fp0
= tcg_temp_new_i32();
8900 gen_load_fpr32(ctx
, fp0
, fs
);
8901 gen_helper_float_chs_s(fp0
, fp0
);
8902 gen_store_fpr32(ctx
, fp0
, fd
);
8903 tcg_temp_free_i32(fp0
);
8907 check_cp1_64bitmode(ctx
);
8909 TCGv_i32 fp32
= tcg_temp_new_i32();
8910 TCGv_i64 fp64
= tcg_temp_new_i64();
8912 gen_load_fpr32(ctx
, fp32
, fs
);
8913 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8914 tcg_temp_free_i32(fp32
);
8915 gen_store_fpr64(ctx
, fp64
, fd
);
8916 tcg_temp_free_i64(fp64
);
8920 check_cp1_64bitmode(ctx
);
8922 TCGv_i32 fp32
= tcg_temp_new_i32();
8923 TCGv_i64 fp64
= tcg_temp_new_i64();
8925 gen_load_fpr32(ctx
, fp32
, fs
);
8926 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8927 tcg_temp_free_i32(fp32
);
8928 gen_store_fpr64(ctx
, fp64
, fd
);
8929 tcg_temp_free_i64(fp64
);
8933 check_cp1_64bitmode(ctx
);
8935 TCGv_i32 fp32
= tcg_temp_new_i32();
8936 TCGv_i64 fp64
= tcg_temp_new_i64();
8938 gen_load_fpr32(ctx
, fp32
, fs
);
8939 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8940 tcg_temp_free_i32(fp32
);
8941 gen_store_fpr64(ctx
, fp64
, fd
);
8942 tcg_temp_free_i64(fp64
);
8946 check_cp1_64bitmode(ctx
);
8948 TCGv_i32 fp32
= tcg_temp_new_i32();
8949 TCGv_i64 fp64
= tcg_temp_new_i64();
8951 gen_load_fpr32(ctx
, fp32
, fs
);
8952 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8953 tcg_temp_free_i32(fp32
);
8954 gen_store_fpr64(ctx
, fp64
, fd
);
8955 tcg_temp_free_i64(fp64
);
8960 TCGv_i32 fp0
= tcg_temp_new_i32();
8962 gen_load_fpr32(ctx
, fp0
, fs
);
8963 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8964 gen_store_fpr32(ctx
, fp0
, fd
);
8965 tcg_temp_free_i32(fp0
);
8970 TCGv_i32 fp0
= tcg_temp_new_i32();
8972 gen_load_fpr32(ctx
, fp0
, fs
);
8973 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8974 gen_store_fpr32(ctx
, fp0
, fd
);
8975 tcg_temp_free_i32(fp0
);
8980 TCGv_i32 fp0
= tcg_temp_new_i32();
8982 gen_load_fpr32(ctx
, fp0
, fs
);
8983 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8984 gen_store_fpr32(ctx
, fp0
, fd
);
8985 tcg_temp_free_i32(fp0
);
8990 TCGv_i32 fp0
= tcg_temp_new_i32();
8992 gen_load_fpr32(ctx
, fp0
, fs
);
8993 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8994 gen_store_fpr32(ctx
, fp0
, fd
);
8995 tcg_temp_free_i32(fp0
);
8999 check_insn(ctx
, ISA_MIPS32R6
);
9000 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9003 check_insn(ctx
, ISA_MIPS32R6
);
9004 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9007 check_insn(ctx
, ISA_MIPS32R6
);
9008 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9011 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9012 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9015 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9017 TCGLabel
*l1
= gen_new_label();
9021 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9023 fp0
= tcg_temp_new_i32();
9024 gen_load_fpr32(ctx
, fp0
, fs
);
9025 gen_store_fpr32(ctx
, fp0
, fd
);
9026 tcg_temp_free_i32(fp0
);
9031 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9033 TCGLabel
*l1
= gen_new_label();
9037 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9038 fp0
= tcg_temp_new_i32();
9039 gen_load_fpr32(ctx
, fp0
, fs
);
9040 gen_store_fpr32(ctx
, fp0
, fd
);
9041 tcg_temp_free_i32(fp0
);
9048 TCGv_i32 fp0
= tcg_temp_new_i32();
9050 gen_load_fpr32(ctx
, fp0
, fs
);
9051 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9052 gen_store_fpr32(ctx
, fp0
, fd
);
9053 tcg_temp_free_i32(fp0
);
9058 TCGv_i32 fp0
= tcg_temp_new_i32();
9060 gen_load_fpr32(ctx
, fp0
, fs
);
9061 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9062 gen_store_fpr32(ctx
, fp0
, fd
);
9063 tcg_temp_free_i32(fp0
);
9067 check_insn(ctx
, ISA_MIPS32R6
);
9069 TCGv_i32 fp0
= tcg_temp_new_i32();
9070 TCGv_i32 fp1
= tcg_temp_new_i32();
9071 TCGv_i32 fp2
= tcg_temp_new_i32();
9072 gen_load_fpr32(ctx
, fp0
, fs
);
9073 gen_load_fpr32(ctx
, fp1
, ft
);
9074 gen_load_fpr32(ctx
, fp2
, fd
);
9075 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9076 gen_store_fpr32(ctx
, fp2
, fd
);
9077 tcg_temp_free_i32(fp2
);
9078 tcg_temp_free_i32(fp1
);
9079 tcg_temp_free_i32(fp0
);
9083 check_insn(ctx
, ISA_MIPS32R6
);
9085 TCGv_i32 fp0
= tcg_temp_new_i32();
9086 TCGv_i32 fp1
= tcg_temp_new_i32();
9087 TCGv_i32 fp2
= tcg_temp_new_i32();
9088 gen_load_fpr32(ctx
, fp0
, fs
);
9089 gen_load_fpr32(ctx
, fp1
, ft
);
9090 gen_load_fpr32(ctx
, fp2
, fd
);
9091 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9092 gen_store_fpr32(ctx
, fp2
, fd
);
9093 tcg_temp_free_i32(fp2
);
9094 tcg_temp_free_i32(fp1
);
9095 tcg_temp_free_i32(fp0
);
9099 check_insn(ctx
, ISA_MIPS32R6
);
9101 TCGv_i32 fp0
= tcg_temp_new_i32();
9102 gen_load_fpr32(ctx
, fp0
, fs
);
9103 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9104 gen_store_fpr32(ctx
, fp0
, fd
);
9105 tcg_temp_free_i32(fp0
);
9109 check_insn(ctx
, ISA_MIPS32R6
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9112 gen_load_fpr32(ctx
, fp0
, fs
);
9113 gen_helper_float_class_s(fp0
, fp0
);
9114 gen_store_fpr32(ctx
, fp0
, fd
);
9115 tcg_temp_free_i32(fp0
);
9118 case OPC_MIN_S
: /* OPC_RECIP2_S */
9119 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9121 TCGv_i32 fp0
= tcg_temp_new_i32();
9122 TCGv_i32 fp1
= tcg_temp_new_i32();
9123 TCGv_i32 fp2
= tcg_temp_new_i32();
9124 gen_load_fpr32(ctx
, fp0
, fs
);
9125 gen_load_fpr32(ctx
, fp1
, ft
);
9126 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9127 gen_store_fpr32(ctx
, fp2
, fd
);
9128 tcg_temp_free_i32(fp2
);
9129 tcg_temp_free_i32(fp1
);
9130 tcg_temp_free_i32(fp0
);
9133 check_cp1_64bitmode(ctx
);
9135 TCGv_i32 fp0
= tcg_temp_new_i32();
9136 TCGv_i32 fp1
= tcg_temp_new_i32();
9138 gen_load_fpr32(ctx
, fp0
, fs
);
9139 gen_load_fpr32(ctx
, fp1
, ft
);
9140 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9141 tcg_temp_free_i32(fp1
);
9142 gen_store_fpr32(ctx
, fp0
, fd
);
9143 tcg_temp_free_i32(fp0
);
9147 case OPC_MINA_S
: /* OPC_RECIP1_S */
9148 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9150 TCGv_i32 fp0
= tcg_temp_new_i32();
9151 TCGv_i32 fp1
= tcg_temp_new_i32();
9152 TCGv_i32 fp2
= tcg_temp_new_i32();
9153 gen_load_fpr32(ctx
, fp0
, fs
);
9154 gen_load_fpr32(ctx
, fp1
, ft
);
9155 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9156 gen_store_fpr32(ctx
, fp2
, fd
);
9157 tcg_temp_free_i32(fp2
);
9158 tcg_temp_free_i32(fp1
);
9159 tcg_temp_free_i32(fp0
);
9162 check_cp1_64bitmode(ctx
);
9164 TCGv_i32 fp0
= tcg_temp_new_i32();
9166 gen_load_fpr32(ctx
, fp0
, fs
);
9167 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9168 gen_store_fpr32(ctx
, fp0
, fd
);
9169 tcg_temp_free_i32(fp0
);
9173 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9174 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9176 TCGv_i32 fp0
= tcg_temp_new_i32();
9177 TCGv_i32 fp1
= tcg_temp_new_i32();
9178 gen_load_fpr32(ctx
, fp0
, fs
);
9179 gen_load_fpr32(ctx
, fp1
, ft
);
9180 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9181 gen_store_fpr32(ctx
, fp1
, fd
);
9182 tcg_temp_free_i32(fp1
);
9183 tcg_temp_free_i32(fp0
);
9186 check_cp1_64bitmode(ctx
);
9188 TCGv_i32 fp0
= tcg_temp_new_i32();
9190 gen_load_fpr32(ctx
, fp0
, fs
);
9191 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9192 gen_store_fpr32(ctx
, fp0
, fd
);
9193 tcg_temp_free_i32(fp0
);
9197 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9198 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9200 TCGv_i32 fp0
= tcg_temp_new_i32();
9201 TCGv_i32 fp1
= tcg_temp_new_i32();
9202 gen_load_fpr32(ctx
, fp0
, fs
);
9203 gen_load_fpr32(ctx
, fp1
, ft
);
9204 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9205 gen_store_fpr32(ctx
, fp1
, fd
);
9206 tcg_temp_free_i32(fp1
);
9207 tcg_temp_free_i32(fp0
);
9210 check_cp1_64bitmode(ctx
);
9212 TCGv_i32 fp0
= tcg_temp_new_i32();
9213 TCGv_i32 fp1
= tcg_temp_new_i32();
9215 gen_load_fpr32(ctx
, fp0
, fs
);
9216 gen_load_fpr32(ctx
, fp1
, ft
);
9217 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9218 tcg_temp_free_i32(fp1
);
9219 gen_store_fpr32(ctx
, fp0
, fd
);
9220 tcg_temp_free_i32(fp0
);
9225 check_cp1_registers(ctx
, fd
);
9227 TCGv_i32 fp32
= tcg_temp_new_i32();
9228 TCGv_i64 fp64
= tcg_temp_new_i64();
9230 gen_load_fpr32(ctx
, fp32
, fs
);
9231 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9232 tcg_temp_free_i32(fp32
);
9233 gen_store_fpr64(ctx
, fp64
, fd
);
9234 tcg_temp_free_i64(fp64
);
9239 TCGv_i32 fp0
= tcg_temp_new_i32();
9241 gen_load_fpr32(ctx
, fp0
, fs
);
9242 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9243 gen_store_fpr32(ctx
, fp0
, fd
);
9244 tcg_temp_free_i32(fp0
);
9248 check_cp1_64bitmode(ctx
);
9250 TCGv_i32 fp32
= tcg_temp_new_i32();
9251 TCGv_i64 fp64
= tcg_temp_new_i64();
9253 gen_load_fpr32(ctx
, fp32
, fs
);
9254 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9255 tcg_temp_free_i32(fp32
);
9256 gen_store_fpr64(ctx
, fp64
, fd
);
9257 tcg_temp_free_i64(fp64
);
9263 TCGv_i64 fp64
= tcg_temp_new_i64();
9264 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9265 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9267 gen_load_fpr32(ctx
, fp32_0
, fs
);
9268 gen_load_fpr32(ctx
, fp32_1
, ft
);
9269 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9270 tcg_temp_free_i32(fp32_1
);
9271 tcg_temp_free_i32(fp32_0
);
9272 gen_store_fpr64(ctx
, fp64
, fd
);
9273 tcg_temp_free_i64(fp64
);
9285 case OPC_CMP_NGLE_S
:
9292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9293 if (ctx
->opcode
& (1 << 6)) {
9294 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9296 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9300 check_cp1_registers(ctx
, fs
| ft
| fd
);
9302 TCGv_i64 fp0
= tcg_temp_new_i64();
9303 TCGv_i64 fp1
= tcg_temp_new_i64();
9305 gen_load_fpr64(ctx
, fp0
, fs
);
9306 gen_load_fpr64(ctx
, fp1
, ft
);
9307 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9308 tcg_temp_free_i64(fp1
);
9309 gen_store_fpr64(ctx
, fp0
, fd
);
9310 tcg_temp_free_i64(fp0
);
9314 check_cp1_registers(ctx
, fs
| ft
| fd
);
9316 TCGv_i64 fp0
= tcg_temp_new_i64();
9317 TCGv_i64 fp1
= tcg_temp_new_i64();
9319 gen_load_fpr64(ctx
, fp0
, fs
);
9320 gen_load_fpr64(ctx
, fp1
, ft
);
9321 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9322 tcg_temp_free_i64(fp1
);
9323 gen_store_fpr64(ctx
, fp0
, fd
);
9324 tcg_temp_free_i64(fp0
);
9328 check_cp1_registers(ctx
, fs
| ft
| fd
);
9330 TCGv_i64 fp0
= tcg_temp_new_i64();
9331 TCGv_i64 fp1
= tcg_temp_new_i64();
9333 gen_load_fpr64(ctx
, fp0
, fs
);
9334 gen_load_fpr64(ctx
, fp1
, ft
);
9335 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9336 tcg_temp_free_i64(fp1
);
9337 gen_store_fpr64(ctx
, fp0
, fd
);
9338 tcg_temp_free_i64(fp0
);
9342 check_cp1_registers(ctx
, fs
| ft
| fd
);
9344 TCGv_i64 fp0
= tcg_temp_new_i64();
9345 TCGv_i64 fp1
= tcg_temp_new_i64();
9347 gen_load_fpr64(ctx
, fp0
, fs
);
9348 gen_load_fpr64(ctx
, fp1
, ft
);
9349 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9350 tcg_temp_free_i64(fp1
);
9351 gen_store_fpr64(ctx
, fp0
, fd
);
9352 tcg_temp_free_i64(fp0
);
9356 check_cp1_registers(ctx
, fs
| fd
);
9358 TCGv_i64 fp0
= tcg_temp_new_i64();
9360 gen_load_fpr64(ctx
, fp0
, fs
);
9361 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9362 gen_store_fpr64(ctx
, fp0
, fd
);
9363 tcg_temp_free_i64(fp0
);
9367 check_cp1_registers(ctx
, fs
| fd
);
9369 TCGv_i64 fp0
= tcg_temp_new_i64();
9371 gen_load_fpr64(ctx
, fp0
, fs
);
9372 gen_helper_float_abs_d(fp0
, fp0
);
9373 gen_store_fpr64(ctx
, fp0
, fd
);
9374 tcg_temp_free_i64(fp0
);
9378 check_cp1_registers(ctx
, fs
| fd
);
9380 TCGv_i64 fp0
= tcg_temp_new_i64();
9382 gen_load_fpr64(ctx
, fp0
, fs
);
9383 gen_store_fpr64(ctx
, fp0
, fd
);
9384 tcg_temp_free_i64(fp0
);
9388 check_cp1_registers(ctx
, fs
| fd
);
9390 TCGv_i64 fp0
= tcg_temp_new_i64();
9392 gen_load_fpr64(ctx
, fp0
, fs
);
9393 gen_helper_float_chs_d(fp0
, fp0
);
9394 gen_store_fpr64(ctx
, fp0
, fd
);
9395 tcg_temp_free_i64(fp0
);
9399 check_cp1_64bitmode(ctx
);
9401 TCGv_i64 fp0
= tcg_temp_new_i64();
9403 gen_load_fpr64(ctx
, fp0
, fs
);
9404 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9405 gen_store_fpr64(ctx
, fp0
, fd
);
9406 tcg_temp_free_i64(fp0
);
9410 check_cp1_64bitmode(ctx
);
9412 TCGv_i64 fp0
= tcg_temp_new_i64();
9414 gen_load_fpr64(ctx
, fp0
, fs
);
9415 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9416 gen_store_fpr64(ctx
, fp0
, fd
);
9417 tcg_temp_free_i64(fp0
);
9421 check_cp1_64bitmode(ctx
);
9423 TCGv_i64 fp0
= tcg_temp_new_i64();
9425 gen_load_fpr64(ctx
, fp0
, fs
);
9426 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9427 gen_store_fpr64(ctx
, fp0
, fd
);
9428 tcg_temp_free_i64(fp0
);
9432 check_cp1_64bitmode(ctx
);
9434 TCGv_i64 fp0
= tcg_temp_new_i64();
9436 gen_load_fpr64(ctx
, fp0
, fs
);
9437 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9438 gen_store_fpr64(ctx
, fp0
, fd
);
9439 tcg_temp_free_i64(fp0
);
9443 check_cp1_registers(ctx
, fs
);
9445 TCGv_i32 fp32
= tcg_temp_new_i32();
9446 TCGv_i64 fp64
= tcg_temp_new_i64();
9448 gen_load_fpr64(ctx
, fp64
, fs
);
9449 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9450 tcg_temp_free_i64(fp64
);
9451 gen_store_fpr32(ctx
, fp32
, fd
);
9452 tcg_temp_free_i32(fp32
);
9456 check_cp1_registers(ctx
, fs
);
9458 TCGv_i32 fp32
= tcg_temp_new_i32();
9459 TCGv_i64 fp64
= tcg_temp_new_i64();
9461 gen_load_fpr64(ctx
, fp64
, fs
);
9462 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9463 tcg_temp_free_i64(fp64
);
9464 gen_store_fpr32(ctx
, fp32
, fd
);
9465 tcg_temp_free_i32(fp32
);
9469 check_cp1_registers(ctx
, fs
);
9471 TCGv_i32 fp32
= tcg_temp_new_i32();
9472 TCGv_i64 fp64
= tcg_temp_new_i64();
9474 gen_load_fpr64(ctx
, fp64
, fs
);
9475 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9476 tcg_temp_free_i64(fp64
);
9477 gen_store_fpr32(ctx
, fp32
, fd
);
9478 tcg_temp_free_i32(fp32
);
9482 check_cp1_registers(ctx
, fs
);
9484 TCGv_i32 fp32
= tcg_temp_new_i32();
9485 TCGv_i64 fp64
= tcg_temp_new_i64();
9487 gen_load_fpr64(ctx
, fp64
, fs
);
9488 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9489 tcg_temp_free_i64(fp64
);
9490 gen_store_fpr32(ctx
, fp32
, fd
);
9491 tcg_temp_free_i32(fp32
);
9495 check_insn(ctx
, ISA_MIPS32R6
);
9496 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9499 check_insn(ctx
, ISA_MIPS32R6
);
9500 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9503 check_insn(ctx
, ISA_MIPS32R6
);
9504 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9507 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9508 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9511 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9513 TCGLabel
*l1
= gen_new_label();
9517 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9519 fp0
= tcg_temp_new_i64();
9520 gen_load_fpr64(ctx
, fp0
, fs
);
9521 gen_store_fpr64(ctx
, fp0
, fd
);
9522 tcg_temp_free_i64(fp0
);
9527 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9529 TCGLabel
*l1
= gen_new_label();
9533 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9534 fp0
= tcg_temp_new_i64();
9535 gen_load_fpr64(ctx
, fp0
, fs
);
9536 gen_store_fpr64(ctx
, fp0
, fd
);
9537 tcg_temp_free_i64(fp0
);
9543 check_cp1_registers(ctx
, fs
| fd
);
9545 TCGv_i64 fp0
= tcg_temp_new_i64();
9547 gen_load_fpr64(ctx
, fp0
, fs
);
9548 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9549 gen_store_fpr64(ctx
, fp0
, fd
);
9550 tcg_temp_free_i64(fp0
);
9554 check_cp1_registers(ctx
, fs
| fd
);
9556 TCGv_i64 fp0
= tcg_temp_new_i64();
9558 gen_load_fpr64(ctx
, fp0
, fs
);
9559 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9560 gen_store_fpr64(ctx
, fp0
, fd
);
9561 tcg_temp_free_i64(fp0
);
9565 check_insn(ctx
, ISA_MIPS32R6
);
9567 TCGv_i64 fp0
= tcg_temp_new_i64();
9568 TCGv_i64 fp1
= tcg_temp_new_i64();
9569 TCGv_i64 fp2
= tcg_temp_new_i64();
9570 gen_load_fpr64(ctx
, fp0
, fs
);
9571 gen_load_fpr64(ctx
, fp1
, ft
);
9572 gen_load_fpr64(ctx
, fp2
, fd
);
9573 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9574 gen_store_fpr64(ctx
, fp2
, fd
);
9575 tcg_temp_free_i64(fp2
);
9576 tcg_temp_free_i64(fp1
);
9577 tcg_temp_free_i64(fp0
);
9581 check_insn(ctx
, ISA_MIPS32R6
);
9583 TCGv_i64 fp0
= tcg_temp_new_i64();
9584 TCGv_i64 fp1
= tcg_temp_new_i64();
9585 TCGv_i64 fp2
= tcg_temp_new_i64();
9586 gen_load_fpr64(ctx
, fp0
, fs
);
9587 gen_load_fpr64(ctx
, fp1
, ft
);
9588 gen_load_fpr64(ctx
, fp2
, fd
);
9589 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9590 gen_store_fpr64(ctx
, fp2
, fd
);
9591 tcg_temp_free_i64(fp2
);
9592 tcg_temp_free_i64(fp1
);
9593 tcg_temp_free_i64(fp0
);
9597 check_insn(ctx
, ISA_MIPS32R6
);
9599 TCGv_i64 fp0
= tcg_temp_new_i64();
9600 gen_load_fpr64(ctx
, fp0
, fs
);
9601 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9602 gen_store_fpr64(ctx
, fp0
, fd
);
9603 tcg_temp_free_i64(fp0
);
9607 check_insn(ctx
, ISA_MIPS32R6
);
9609 TCGv_i64 fp0
= tcg_temp_new_i64();
9610 gen_load_fpr64(ctx
, fp0
, fs
);
9611 gen_helper_float_class_d(fp0
, fp0
);
9612 gen_store_fpr64(ctx
, fp0
, fd
);
9613 tcg_temp_free_i64(fp0
);
9616 case OPC_MIN_D
: /* OPC_RECIP2_D */
9617 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9619 TCGv_i64 fp0
= tcg_temp_new_i64();
9620 TCGv_i64 fp1
= tcg_temp_new_i64();
9621 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_load_fpr64(ctx
, fp1
, ft
);
9623 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9624 gen_store_fpr64(ctx
, fp1
, fd
);
9625 tcg_temp_free_i64(fp1
);
9626 tcg_temp_free_i64(fp0
);
9629 check_cp1_64bitmode(ctx
);
9631 TCGv_i64 fp0
= tcg_temp_new_i64();
9632 TCGv_i64 fp1
= tcg_temp_new_i64();
9634 gen_load_fpr64(ctx
, fp0
, fs
);
9635 gen_load_fpr64(ctx
, fp1
, ft
);
9636 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9637 tcg_temp_free_i64(fp1
);
9638 gen_store_fpr64(ctx
, fp0
, fd
);
9639 tcg_temp_free_i64(fp0
);
9643 case OPC_MINA_D
: /* OPC_RECIP1_D */
9644 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9646 TCGv_i64 fp0
= tcg_temp_new_i64();
9647 TCGv_i64 fp1
= tcg_temp_new_i64();
9648 gen_load_fpr64(ctx
, fp0
, fs
);
9649 gen_load_fpr64(ctx
, fp1
, ft
);
9650 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9651 gen_store_fpr64(ctx
, fp1
, fd
);
9652 tcg_temp_free_i64(fp1
);
9653 tcg_temp_free_i64(fp0
);
9656 check_cp1_64bitmode(ctx
);
9658 TCGv_i64 fp0
= tcg_temp_new_i64();
9660 gen_load_fpr64(ctx
, fp0
, fs
);
9661 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9662 gen_store_fpr64(ctx
, fp0
, fd
);
9663 tcg_temp_free_i64(fp0
);
9667 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9668 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9670 TCGv_i64 fp0
= tcg_temp_new_i64();
9671 TCGv_i64 fp1
= tcg_temp_new_i64();
9672 gen_load_fpr64(ctx
, fp0
, fs
);
9673 gen_load_fpr64(ctx
, fp1
, ft
);
9674 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9675 gen_store_fpr64(ctx
, fp1
, fd
);
9676 tcg_temp_free_i64(fp1
);
9677 tcg_temp_free_i64(fp0
);
9680 check_cp1_64bitmode(ctx
);
9682 TCGv_i64 fp0
= tcg_temp_new_i64();
9684 gen_load_fpr64(ctx
, fp0
, fs
);
9685 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9686 gen_store_fpr64(ctx
, fp0
, fd
);
9687 tcg_temp_free_i64(fp0
);
9691 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9692 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9694 TCGv_i64 fp0
= tcg_temp_new_i64();
9695 TCGv_i64 fp1
= tcg_temp_new_i64();
9696 gen_load_fpr64(ctx
, fp0
, fs
);
9697 gen_load_fpr64(ctx
, fp1
, ft
);
9698 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9699 gen_store_fpr64(ctx
, fp1
, fd
);
9700 tcg_temp_free_i64(fp1
);
9701 tcg_temp_free_i64(fp0
);
9704 check_cp1_64bitmode(ctx
);
9706 TCGv_i64 fp0
= tcg_temp_new_i64();
9707 TCGv_i64 fp1
= tcg_temp_new_i64();
9709 gen_load_fpr64(ctx
, fp0
, fs
);
9710 gen_load_fpr64(ctx
, fp1
, ft
);
9711 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9712 tcg_temp_free_i64(fp1
);
9713 gen_store_fpr64(ctx
, fp0
, fd
);
9714 tcg_temp_free_i64(fp0
);
9727 case OPC_CMP_NGLE_D
:
9734 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9735 if (ctx
->opcode
& (1 << 6)) {
9736 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9738 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9742 check_cp1_registers(ctx
, fs
);
9744 TCGv_i32 fp32
= tcg_temp_new_i32();
9745 TCGv_i64 fp64
= tcg_temp_new_i64();
9747 gen_load_fpr64(ctx
, fp64
, fs
);
9748 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9749 tcg_temp_free_i64(fp64
);
9750 gen_store_fpr32(ctx
, fp32
, fd
);
9751 tcg_temp_free_i32(fp32
);
9755 check_cp1_registers(ctx
, fs
);
9757 TCGv_i32 fp32
= tcg_temp_new_i32();
9758 TCGv_i64 fp64
= tcg_temp_new_i64();
9760 gen_load_fpr64(ctx
, fp64
, fs
);
9761 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9762 tcg_temp_free_i64(fp64
);
9763 gen_store_fpr32(ctx
, fp32
, fd
);
9764 tcg_temp_free_i32(fp32
);
9768 check_cp1_64bitmode(ctx
);
9770 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 gen_load_fpr64(ctx
, fp0
, fs
);
9773 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9774 gen_store_fpr64(ctx
, fp0
, fd
);
9775 tcg_temp_free_i64(fp0
);
9780 TCGv_i32 fp0
= tcg_temp_new_i32();
9782 gen_load_fpr32(ctx
, fp0
, fs
);
9783 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9784 gen_store_fpr32(ctx
, fp0
, fd
);
9785 tcg_temp_free_i32(fp0
);
9789 check_cp1_registers(ctx
, fd
);
9791 TCGv_i32 fp32
= tcg_temp_new_i32();
9792 TCGv_i64 fp64
= tcg_temp_new_i64();
9794 gen_load_fpr32(ctx
, fp32
, fs
);
9795 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9796 tcg_temp_free_i32(fp32
);
9797 gen_store_fpr64(ctx
, fp64
, fd
);
9798 tcg_temp_free_i64(fp64
);
9802 check_cp1_64bitmode(ctx
);
9804 TCGv_i32 fp32
= tcg_temp_new_i32();
9805 TCGv_i64 fp64
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp64
, fs
);
9808 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9809 tcg_temp_free_i64(fp64
);
9810 gen_store_fpr32(ctx
, fp32
, fd
);
9811 tcg_temp_free_i32(fp32
);
9815 check_cp1_64bitmode(ctx
);
9817 TCGv_i64 fp0
= tcg_temp_new_i64();
9819 gen_load_fpr64(ctx
, fp0
, fs
);
9820 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9821 gen_store_fpr64(ctx
, fp0
, fd
);
9822 tcg_temp_free_i64(fp0
);
9828 TCGv_i64 fp0
= tcg_temp_new_i64();
9830 gen_load_fpr64(ctx
, fp0
, fs
);
9831 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9832 gen_store_fpr64(ctx
, fp0
, fd
);
9833 tcg_temp_free_i64(fp0
);
9839 TCGv_i64 fp0
= tcg_temp_new_i64();
9840 TCGv_i64 fp1
= tcg_temp_new_i64();
9842 gen_load_fpr64(ctx
, fp0
, fs
);
9843 gen_load_fpr64(ctx
, fp1
, ft
);
9844 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9845 tcg_temp_free_i64(fp1
);
9846 gen_store_fpr64(ctx
, fp0
, fd
);
9847 tcg_temp_free_i64(fp0
);
9853 TCGv_i64 fp0
= tcg_temp_new_i64();
9854 TCGv_i64 fp1
= tcg_temp_new_i64();
9856 gen_load_fpr64(ctx
, fp0
, fs
);
9857 gen_load_fpr64(ctx
, fp1
, ft
);
9858 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9859 tcg_temp_free_i64(fp1
);
9860 gen_store_fpr64(ctx
, fp0
, fd
);
9861 tcg_temp_free_i64(fp0
);
9867 TCGv_i64 fp0
= tcg_temp_new_i64();
9868 TCGv_i64 fp1
= tcg_temp_new_i64();
9870 gen_load_fpr64(ctx
, fp0
, fs
);
9871 gen_load_fpr64(ctx
, fp1
, ft
);
9872 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9873 tcg_temp_free_i64(fp1
);
9874 gen_store_fpr64(ctx
, fp0
, fd
);
9875 tcg_temp_free_i64(fp0
);
9881 TCGv_i64 fp0
= tcg_temp_new_i64();
9883 gen_load_fpr64(ctx
, fp0
, fs
);
9884 gen_helper_float_abs_ps(fp0
, fp0
);
9885 gen_store_fpr64(ctx
, fp0
, fd
);
9886 tcg_temp_free_i64(fp0
);
9892 TCGv_i64 fp0
= tcg_temp_new_i64();
9894 gen_load_fpr64(ctx
, fp0
, fs
);
9895 gen_store_fpr64(ctx
, fp0
, fd
);
9896 tcg_temp_free_i64(fp0
);
9902 TCGv_i64 fp0
= tcg_temp_new_i64();
9904 gen_load_fpr64(ctx
, fp0
, fs
);
9905 gen_helper_float_chs_ps(fp0
, fp0
);
9906 gen_store_fpr64(ctx
, fp0
, fd
);
9907 tcg_temp_free_i64(fp0
);
9912 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9917 TCGLabel
*l1
= gen_new_label();
9921 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9922 fp0
= tcg_temp_new_i64();
9923 gen_load_fpr64(ctx
, fp0
, fs
);
9924 gen_store_fpr64(ctx
, fp0
, fd
);
9925 tcg_temp_free_i64(fp0
);
9932 TCGLabel
*l1
= gen_new_label();
9936 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9937 fp0
= tcg_temp_new_i64();
9938 gen_load_fpr64(ctx
, fp0
, fs
);
9939 gen_store_fpr64(ctx
, fp0
, fd
);
9940 tcg_temp_free_i64(fp0
);
9948 TCGv_i64 fp0
= tcg_temp_new_i64();
9949 TCGv_i64 fp1
= tcg_temp_new_i64();
9951 gen_load_fpr64(ctx
, fp0
, ft
);
9952 gen_load_fpr64(ctx
, fp1
, fs
);
9953 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9954 tcg_temp_free_i64(fp1
);
9955 gen_store_fpr64(ctx
, fp0
, fd
);
9956 tcg_temp_free_i64(fp0
);
9962 TCGv_i64 fp0
= tcg_temp_new_i64();
9963 TCGv_i64 fp1
= tcg_temp_new_i64();
9965 gen_load_fpr64(ctx
, fp0
, ft
);
9966 gen_load_fpr64(ctx
, fp1
, fs
);
9967 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9968 tcg_temp_free_i64(fp1
);
9969 gen_store_fpr64(ctx
, fp0
, fd
);
9970 tcg_temp_free_i64(fp0
);
9976 TCGv_i64 fp0
= tcg_temp_new_i64();
9977 TCGv_i64 fp1
= tcg_temp_new_i64();
9979 gen_load_fpr64(ctx
, fp0
, fs
);
9980 gen_load_fpr64(ctx
, fp1
, ft
);
9981 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9982 tcg_temp_free_i64(fp1
);
9983 gen_store_fpr64(ctx
, fp0
, fd
);
9984 tcg_temp_free_i64(fp0
);
9990 TCGv_i64 fp0
= tcg_temp_new_i64();
9992 gen_load_fpr64(ctx
, fp0
, fs
);
9993 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
10001 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 gen_load_fpr64(ctx
, fp0
, fs
);
10004 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10005 gen_store_fpr64(ctx
, fp0
, fd
);
10006 tcg_temp_free_i64(fp0
);
10009 case OPC_RSQRT2_PS
:
10012 TCGv_i64 fp0
= tcg_temp_new_i64();
10013 TCGv_i64 fp1
= tcg_temp_new_i64();
10015 gen_load_fpr64(ctx
, fp0
, fs
);
10016 gen_load_fpr64(ctx
, fp1
, ft
);
10017 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10018 tcg_temp_free_i64(fp1
);
10019 gen_store_fpr64(ctx
, fp0
, fd
);
10020 tcg_temp_free_i64(fp0
);
10024 check_cp1_64bitmode(ctx
);
10026 TCGv_i32 fp0
= tcg_temp_new_i32();
10028 gen_load_fpr32h(ctx
, fp0
, fs
);
10029 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10030 gen_store_fpr32(ctx
, fp0
, fd
);
10031 tcg_temp_free_i32(fp0
);
10034 case OPC_CVT_PW_PS
:
10037 TCGv_i64 fp0
= tcg_temp_new_i64();
10039 gen_load_fpr64(ctx
, fp0
, fs
);
10040 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10041 gen_store_fpr64(ctx
, fp0
, fd
);
10042 tcg_temp_free_i64(fp0
);
10046 check_cp1_64bitmode(ctx
);
10048 TCGv_i32 fp0
= tcg_temp_new_i32();
10050 gen_load_fpr32(ctx
, fp0
, fs
);
10051 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10052 gen_store_fpr32(ctx
, fp0
, fd
);
10053 tcg_temp_free_i32(fp0
);
10059 TCGv_i32 fp0
= tcg_temp_new_i32();
10060 TCGv_i32 fp1
= tcg_temp_new_i32();
10062 gen_load_fpr32(ctx
, fp0
, fs
);
10063 gen_load_fpr32(ctx
, fp1
, ft
);
10064 gen_store_fpr32h(ctx
, fp0
, fd
);
10065 gen_store_fpr32(ctx
, fp1
, fd
);
10066 tcg_temp_free_i32(fp0
);
10067 tcg_temp_free_i32(fp1
);
10073 TCGv_i32 fp0
= tcg_temp_new_i32();
10074 TCGv_i32 fp1
= tcg_temp_new_i32();
10076 gen_load_fpr32(ctx
, fp0
, fs
);
10077 gen_load_fpr32h(ctx
, fp1
, ft
);
10078 gen_store_fpr32(ctx
, fp1
, fd
);
10079 gen_store_fpr32h(ctx
, fp0
, fd
);
10080 tcg_temp_free_i32(fp0
);
10081 tcg_temp_free_i32(fp1
);
10087 TCGv_i32 fp0
= tcg_temp_new_i32();
10088 TCGv_i32 fp1
= tcg_temp_new_i32();
10090 gen_load_fpr32h(ctx
, fp0
, fs
);
10091 gen_load_fpr32(ctx
, fp1
, ft
);
10092 gen_store_fpr32(ctx
, fp1
, fd
);
10093 gen_store_fpr32h(ctx
, fp0
, fd
);
10094 tcg_temp_free_i32(fp0
);
10095 tcg_temp_free_i32(fp1
);
10101 TCGv_i32 fp0
= tcg_temp_new_i32();
10102 TCGv_i32 fp1
= tcg_temp_new_i32();
10104 gen_load_fpr32h(ctx
, fp0
, fs
);
10105 gen_load_fpr32h(ctx
, fp1
, ft
);
10106 gen_store_fpr32(ctx
, fp1
, fd
);
10107 gen_store_fpr32h(ctx
, fp0
, fd
);
10108 tcg_temp_free_i32(fp0
);
10109 tcg_temp_free_i32(fp1
);
10113 case OPC_CMP_UN_PS
:
10114 case OPC_CMP_EQ_PS
:
10115 case OPC_CMP_UEQ_PS
:
10116 case OPC_CMP_OLT_PS
:
10117 case OPC_CMP_ULT_PS
:
10118 case OPC_CMP_OLE_PS
:
10119 case OPC_CMP_ULE_PS
:
10120 case OPC_CMP_SF_PS
:
10121 case OPC_CMP_NGLE_PS
:
10122 case OPC_CMP_SEQ_PS
:
10123 case OPC_CMP_NGL_PS
:
10124 case OPC_CMP_LT_PS
:
10125 case OPC_CMP_NGE_PS
:
10126 case OPC_CMP_LE_PS
:
10127 case OPC_CMP_NGT_PS
:
10128 if (ctx
->opcode
& (1 << 6)) {
10129 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10131 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10135 MIPS_INVAL("farith");
10136 generate_exception_end(ctx
, EXCP_RI
);
10141 /* Coprocessor 3 (FPU) */
10142 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10143 int fd
, int fs
, int base
, int index
)
10145 TCGv t0
= tcg_temp_new();
10148 gen_load_gpr(t0
, index
);
10149 } else if (index
== 0) {
10150 gen_load_gpr(t0
, base
);
10152 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10154 /* Don't do NOP if destination is zero: we must perform the actual
10160 TCGv_i32 fp0
= tcg_temp_new_i32();
10162 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10163 tcg_gen_trunc_tl_i32(fp0
, t0
);
10164 gen_store_fpr32(ctx
, fp0
, fd
);
10165 tcg_temp_free_i32(fp0
);
10170 check_cp1_registers(ctx
, fd
);
10172 TCGv_i64 fp0
= tcg_temp_new_i64();
10173 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10174 gen_store_fpr64(ctx
, fp0
, fd
);
10175 tcg_temp_free_i64(fp0
);
10179 check_cp1_64bitmode(ctx
);
10180 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10182 TCGv_i64 fp0
= tcg_temp_new_i64();
10184 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10185 gen_store_fpr64(ctx
, fp0
, fd
);
10186 tcg_temp_free_i64(fp0
);
10192 TCGv_i32 fp0
= tcg_temp_new_i32();
10193 gen_load_fpr32(ctx
, fp0
, fs
);
10194 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10195 tcg_temp_free_i32(fp0
);
10200 check_cp1_registers(ctx
, fs
);
10202 TCGv_i64 fp0
= tcg_temp_new_i64();
10203 gen_load_fpr64(ctx
, fp0
, fs
);
10204 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10205 tcg_temp_free_i64(fp0
);
10209 check_cp1_64bitmode(ctx
);
10210 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10212 TCGv_i64 fp0
= tcg_temp_new_i64();
10213 gen_load_fpr64(ctx
, fp0
, fs
);
10214 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10215 tcg_temp_free_i64(fp0
);
10222 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10223 int fd
, int fr
, int fs
, int ft
)
10229 TCGv t0
= tcg_temp_local_new();
10230 TCGv_i32 fp
= tcg_temp_new_i32();
10231 TCGv_i32 fph
= tcg_temp_new_i32();
10232 TCGLabel
*l1
= gen_new_label();
10233 TCGLabel
*l2
= gen_new_label();
10235 gen_load_gpr(t0
, fr
);
10236 tcg_gen_andi_tl(t0
, t0
, 0x7);
10238 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10239 gen_load_fpr32(ctx
, fp
, fs
);
10240 gen_load_fpr32h(ctx
, fph
, fs
);
10241 gen_store_fpr32(ctx
, fp
, fd
);
10242 gen_store_fpr32h(ctx
, fph
, fd
);
10245 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10247 #ifdef TARGET_WORDS_BIGENDIAN
10248 gen_load_fpr32(ctx
, fp
, fs
);
10249 gen_load_fpr32h(ctx
, fph
, ft
);
10250 gen_store_fpr32h(ctx
, fp
, fd
);
10251 gen_store_fpr32(ctx
, fph
, fd
);
10253 gen_load_fpr32h(ctx
, fph
, fs
);
10254 gen_load_fpr32(ctx
, fp
, ft
);
10255 gen_store_fpr32(ctx
, fph
, fd
);
10256 gen_store_fpr32h(ctx
, fp
, fd
);
10259 tcg_temp_free_i32(fp
);
10260 tcg_temp_free_i32(fph
);
10266 TCGv_i32 fp0
= tcg_temp_new_i32();
10267 TCGv_i32 fp1
= tcg_temp_new_i32();
10268 TCGv_i32 fp2
= tcg_temp_new_i32();
10270 gen_load_fpr32(ctx
, fp0
, fs
);
10271 gen_load_fpr32(ctx
, fp1
, ft
);
10272 gen_load_fpr32(ctx
, fp2
, fr
);
10273 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10274 tcg_temp_free_i32(fp0
);
10275 tcg_temp_free_i32(fp1
);
10276 gen_store_fpr32(ctx
, fp2
, fd
);
10277 tcg_temp_free_i32(fp2
);
10282 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10284 TCGv_i64 fp0
= tcg_temp_new_i64();
10285 TCGv_i64 fp1
= tcg_temp_new_i64();
10286 TCGv_i64 fp2
= tcg_temp_new_i64();
10288 gen_load_fpr64(ctx
, fp0
, fs
);
10289 gen_load_fpr64(ctx
, fp1
, ft
);
10290 gen_load_fpr64(ctx
, fp2
, fr
);
10291 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10292 tcg_temp_free_i64(fp0
);
10293 tcg_temp_free_i64(fp1
);
10294 gen_store_fpr64(ctx
, fp2
, fd
);
10295 tcg_temp_free_i64(fp2
);
10301 TCGv_i64 fp0
= tcg_temp_new_i64();
10302 TCGv_i64 fp1
= tcg_temp_new_i64();
10303 TCGv_i64 fp2
= tcg_temp_new_i64();
10305 gen_load_fpr64(ctx
, fp0
, fs
);
10306 gen_load_fpr64(ctx
, fp1
, ft
);
10307 gen_load_fpr64(ctx
, fp2
, fr
);
10308 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10309 tcg_temp_free_i64(fp0
);
10310 tcg_temp_free_i64(fp1
);
10311 gen_store_fpr64(ctx
, fp2
, fd
);
10312 tcg_temp_free_i64(fp2
);
10318 TCGv_i32 fp0
= tcg_temp_new_i32();
10319 TCGv_i32 fp1
= tcg_temp_new_i32();
10320 TCGv_i32 fp2
= tcg_temp_new_i32();
10322 gen_load_fpr32(ctx
, fp0
, fs
);
10323 gen_load_fpr32(ctx
, fp1
, ft
);
10324 gen_load_fpr32(ctx
, fp2
, fr
);
10325 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10326 tcg_temp_free_i32(fp0
);
10327 tcg_temp_free_i32(fp1
);
10328 gen_store_fpr32(ctx
, fp2
, fd
);
10329 tcg_temp_free_i32(fp2
);
10334 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10336 TCGv_i64 fp0
= tcg_temp_new_i64();
10337 TCGv_i64 fp1
= tcg_temp_new_i64();
10338 TCGv_i64 fp2
= tcg_temp_new_i64();
10340 gen_load_fpr64(ctx
, fp0
, fs
);
10341 gen_load_fpr64(ctx
, fp1
, ft
);
10342 gen_load_fpr64(ctx
, fp2
, fr
);
10343 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10344 tcg_temp_free_i64(fp0
);
10345 tcg_temp_free_i64(fp1
);
10346 gen_store_fpr64(ctx
, fp2
, fd
);
10347 tcg_temp_free_i64(fp2
);
10353 TCGv_i64 fp0
= tcg_temp_new_i64();
10354 TCGv_i64 fp1
= tcg_temp_new_i64();
10355 TCGv_i64 fp2
= tcg_temp_new_i64();
10357 gen_load_fpr64(ctx
, fp0
, fs
);
10358 gen_load_fpr64(ctx
, fp1
, ft
);
10359 gen_load_fpr64(ctx
, fp2
, fr
);
10360 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10361 tcg_temp_free_i64(fp0
);
10362 tcg_temp_free_i64(fp1
);
10363 gen_store_fpr64(ctx
, fp2
, fd
);
10364 tcg_temp_free_i64(fp2
);
10370 TCGv_i32 fp0
= tcg_temp_new_i32();
10371 TCGv_i32 fp1
= tcg_temp_new_i32();
10372 TCGv_i32 fp2
= tcg_temp_new_i32();
10374 gen_load_fpr32(ctx
, fp0
, fs
);
10375 gen_load_fpr32(ctx
, fp1
, ft
);
10376 gen_load_fpr32(ctx
, fp2
, fr
);
10377 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10378 tcg_temp_free_i32(fp0
);
10379 tcg_temp_free_i32(fp1
);
10380 gen_store_fpr32(ctx
, fp2
, fd
);
10381 tcg_temp_free_i32(fp2
);
10386 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10388 TCGv_i64 fp0
= tcg_temp_new_i64();
10389 TCGv_i64 fp1
= tcg_temp_new_i64();
10390 TCGv_i64 fp2
= tcg_temp_new_i64();
10392 gen_load_fpr64(ctx
, fp0
, fs
);
10393 gen_load_fpr64(ctx
, fp1
, ft
);
10394 gen_load_fpr64(ctx
, fp2
, fr
);
10395 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10396 tcg_temp_free_i64(fp0
);
10397 tcg_temp_free_i64(fp1
);
10398 gen_store_fpr64(ctx
, fp2
, fd
);
10399 tcg_temp_free_i64(fp2
);
10405 TCGv_i64 fp0
= tcg_temp_new_i64();
10406 TCGv_i64 fp1
= tcg_temp_new_i64();
10407 TCGv_i64 fp2
= tcg_temp_new_i64();
10409 gen_load_fpr64(ctx
, fp0
, fs
);
10410 gen_load_fpr64(ctx
, fp1
, ft
);
10411 gen_load_fpr64(ctx
, fp2
, fr
);
10412 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10413 tcg_temp_free_i64(fp0
);
10414 tcg_temp_free_i64(fp1
);
10415 gen_store_fpr64(ctx
, fp2
, fd
);
10416 tcg_temp_free_i64(fp2
);
10422 TCGv_i32 fp0
= tcg_temp_new_i32();
10423 TCGv_i32 fp1
= tcg_temp_new_i32();
10424 TCGv_i32 fp2
= tcg_temp_new_i32();
10426 gen_load_fpr32(ctx
, fp0
, fs
);
10427 gen_load_fpr32(ctx
, fp1
, ft
);
10428 gen_load_fpr32(ctx
, fp2
, fr
);
10429 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10430 tcg_temp_free_i32(fp0
);
10431 tcg_temp_free_i32(fp1
);
10432 gen_store_fpr32(ctx
, fp2
, fd
);
10433 tcg_temp_free_i32(fp2
);
10438 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10440 TCGv_i64 fp0
= tcg_temp_new_i64();
10441 TCGv_i64 fp1
= tcg_temp_new_i64();
10442 TCGv_i64 fp2
= tcg_temp_new_i64();
10444 gen_load_fpr64(ctx
, fp0
, fs
);
10445 gen_load_fpr64(ctx
, fp1
, ft
);
10446 gen_load_fpr64(ctx
, fp2
, fr
);
10447 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10448 tcg_temp_free_i64(fp0
);
10449 tcg_temp_free_i64(fp1
);
10450 gen_store_fpr64(ctx
, fp2
, fd
);
10451 tcg_temp_free_i64(fp2
);
10457 TCGv_i64 fp0
= tcg_temp_new_i64();
10458 TCGv_i64 fp1
= tcg_temp_new_i64();
10459 TCGv_i64 fp2
= tcg_temp_new_i64();
10461 gen_load_fpr64(ctx
, fp0
, fs
);
10462 gen_load_fpr64(ctx
, fp1
, ft
);
10463 gen_load_fpr64(ctx
, fp2
, fr
);
10464 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10465 tcg_temp_free_i64(fp0
);
10466 tcg_temp_free_i64(fp1
);
10467 gen_store_fpr64(ctx
, fp2
, fd
);
10468 tcg_temp_free_i64(fp2
);
    default:
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

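/*
 * RDHWR gives user code read access to a small set of hardware registers:
 * 0 = CPUNum, 1 = SYNCI_Step, 2 = CC (cycle counter), 3 = CCRes,
 * 4 = performance counter, 5 = XNP (both R6), 29 = ULR (UserLocal).  These
 * numbers mirror the cases handled below.  On Linux the kernel traps and
 * emulates RDHWR when the hardware rejects it, which is why the ISA check
 * below is skipped for user-only builds.
 */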
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}

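/*
 * Branch state lives in ctx->hflags (MIPS_HFLAG_BMASK): the delay/forbidden
 * slot type is recorded when a branch is decoded and consumed one
 * instruction later by gen_branch().  clear_branch_hflags() drops those
 * bits from the translation-time copy and, when the block is not ending
 * anyway, also from the run-time hflags TCG global, because the instruction
 * in the delay or forbidden slot may itself have changed hflags.
 */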
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}

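/*
 * R6 compact branches have no delay slot; a not-taken conditional compact
 * branch instead has a "forbidden slot" (MIPS_HFLAG_FBNSLOT) in which
 * control-transfer instructions are not allowed.  BOVC/BNVC branch on
 * 32-bit signed overflow of rs + rt: the code below sign-extends both
 * inputs, adds them, and flags overflow when the sign of the truncated sum
 * differs from the sign of both operands, additionally treating an input
 * that is not a valid sign-extended 32-bit value as an overflow.
 */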
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands and calculate btarget */
    switch (opc) {
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BC:
    case OPC_BALC:
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BEQZC:
    case OPC_BNEZC:
        if (rs != 0) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
            ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        } else {
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        }
        break;
    default:
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
10733 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10734 if (rs
== 0 && rt
!= 0) {
10736 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10737 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10739 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10742 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10745 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10746 if (rs
== 0 && rt
!= 0) {
10748 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10749 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10751 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10754 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10757 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10758 if (rs
== 0 && rt
!= 0) {
10760 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10761 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10763 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10766 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10769 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10770 if (rs
== 0 && rt
!= 0) {
10772 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10773 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10775 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10778 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10781 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10782 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10784 /* OPC_BOVC, OPC_BNVC */
10785 TCGv t2
= tcg_temp_new();
10786 TCGv t3
= tcg_temp_new();
10787 TCGv t4
= tcg_temp_new();
10788 TCGv input_overflow
= tcg_temp_new();
10790 gen_load_gpr(t0
, rs
);
10791 gen_load_gpr(t1
, rt
);
10792 tcg_gen_ext32s_tl(t2
, t0
);
10793 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10794 tcg_gen_ext32s_tl(t3
, t1
);
10795 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10796 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10798 tcg_gen_add_tl(t4
, t2
, t3
);
10799 tcg_gen_ext32s_tl(t4
, t4
);
10800 tcg_gen_xor_tl(t2
, t2
, t3
);
10801 tcg_gen_xor_tl(t3
, t4
, t3
);
10802 tcg_gen_andc_tl(t2
, t3
, t2
);
10803 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10804 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10805 if (opc
== OPC_BOVC
) {
10807 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10810 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10812 tcg_temp_free(input_overflow
);
10816 } else if (rs
< rt
&& rs
== 0) {
10817 /* OPC_BEQZALC, OPC_BNEZALC */
10818 if (opc
== OPC_BEQZALC
) {
10820 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10823 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10826 /* OPC_BEQC, OPC_BNEC */
10827 if (opc
== OPC_BEQC
) {
10829 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10832 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10837 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10840 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10843 MIPS_INVAL("Compact conditional branch/jump");
10844 generate_exception_end(ctx
, EXCP_RI
);
10848 /* Generating branch here as compact branches don't have delay slot */
10849 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10852 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10860 /* ISA extensions (ASEs) */
10861 /* MIPS16 extension to MIPS32 */
10863 /* MIPS16 major opcodes */
10865 M16_OPC_ADDIUSP
= 0x00,
10866 M16_OPC_ADDIUPC
= 0x01,
10868 M16_OPC_JAL
= 0x03,
10869 M16_OPC_BEQZ
= 0x04,
10870 M16_OPC_BNEQZ
= 0x05,
10871 M16_OPC_SHIFT
= 0x06,
10873 M16_OPC_RRIA
= 0x08,
10874 M16_OPC_ADDIU8
= 0x09,
10875 M16_OPC_SLTI
= 0x0a,
10876 M16_OPC_SLTIU
= 0x0b,
10879 M16_OPC_CMPI
= 0x0e,
10883 M16_OPC_LWSP
= 0x12,
10885 M16_OPC_LBU
= 0x14,
10886 M16_OPC_LHU
= 0x15,
10887 M16_OPC_LWPC
= 0x16,
10888 M16_OPC_LWU
= 0x17,
10891 M16_OPC_SWSP
= 0x1a,
10893 M16_OPC_RRR
= 0x1c,
10895 M16_OPC_EXTEND
= 0x1e,
10899 /* I8 funct field */
10918 /* RR funct field */
10952 /* I64 funct field */
10960 I64_DADDIUPC
= 0x6,
10964 /* RR ry field for CNVT */
10966 RR_RY_CNVT_ZEB
= 0x0,
10967 RR_RY_CNVT_ZEH
= 0x1,
10968 RR_RY_CNVT_ZEW
= 0x2,
10969 RR_RY_CNVT_SEB
= 0x4,
10970 RR_RY_CNVT_SEH
= 0x5,
10971 RR_RY_CNVT_SEW
= 0x6,
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
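/*
 * MIPS16 instructions encode GPRs in 3 bits; xlat() expands that to the
 * real register number.  For example xlat(0) == 16 ($s0), xlat(1) == 17
 * ($s1) and xlat(2)..xlat(7) cover registers 2..7 ($v0, $v1, $a0..$a3).
 */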
10981 static void gen_mips16_save (DisasContext
*ctx
,
10982 int xsregs
, int aregs
,
10983 int do_ra
, int do_s0
, int do_s1
,
10986 TCGv t0
= tcg_temp_new();
10987 TCGv t1
= tcg_temp_new();
10988 TCGv t2
= tcg_temp_new();
11018 generate_exception_end(ctx
, EXCP_RI
);
11024 gen_base_offset_addr(ctx
, t0
, 29, 12);
11025 gen_load_gpr(t1
, 7);
11026 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11029 gen_base_offset_addr(ctx
, t0
, 29, 8);
11030 gen_load_gpr(t1
, 6);
11031 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11034 gen_base_offset_addr(ctx
, t0
, 29, 4);
11035 gen_load_gpr(t1
, 5);
11036 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11039 gen_base_offset_addr(ctx
, t0
, 29, 0);
11040 gen_load_gpr(t1
, 4);
11041 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11044 gen_load_gpr(t0
, 29);
11046 #define DECR_AND_STORE(reg) do { \
11047 tcg_gen_movi_tl(t2, -4); \
11048 gen_op_addr_add(ctx, t0, t0, t2); \
11049 gen_load_gpr(t1, reg); \
11050 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11054 DECR_AND_STORE(31);
11059 DECR_AND_STORE(30);
11062 DECR_AND_STORE(23);
11065 DECR_AND_STORE(22);
11068 DECR_AND_STORE(21);
11071 DECR_AND_STORE(20);
11074 DECR_AND_STORE(19);
11077 DECR_AND_STORE(18);
11081 DECR_AND_STORE(17);
11084 DECR_AND_STORE(16);
11114 generate_exception_end(ctx
, EXCP_RI
);
11130 #undef DECR_AND_STORE
11132 tcg_gen_movi_tl(t2
, -framesize
);
11133 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11139 static void gen_mips16_restore (DisasContext
*ctx
,
11140 int xsregs
, int aregs
,
11141 int do_ra
, int do_s0
, int do_s1
,
11145 TCGv t0
= tcg_temp_new();
11146 TCGv t1
= tcg_temp_new();
11147 TCGv t2
= tcg_temp_new();
11149 tcg_gen_movi_tl(t2
, framesize
);
11150 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11152 #define DECR_AND_LOAD(reg) do { \
11153 tcg_gen_movi_tl(t2, -4); \
11154 gen_op_addr_add(ctx, t0, t0, t2); \
11155 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11156 gen_store_gpr(t1, reg); \
11220 generate_exception_end(ctx
, EXCP_RI
);
11236 #undef DECR_AND_LOAD
11238 tcg_gen_movi_tl(t2
, framesize
);
11239 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11245 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11246 int is_64_bit
, int extended
)
11250 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11251 generate_exception_end(ctx
, EXCP_RI
);
11255 t0
= tcg_temp_new();
11257 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11258 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11260 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11266 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11269 TCGv_i32 t0
= tcg_const_i32(op
);
11270 TCGv t1
= tcg_temp_new();
11271 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11272 gen_helper_cache(cpu_env
, t1
, t0
);
11275 #if defined(TARGET_MIPS64)
11276 static void decode_i64_mips16 (DisasContext
*ctx
,
11277 int ry
, int funct
, int16_t offset
,
11282 check_insn(ctx
, ISA_MIPS3
);
11283 check_mips_64(ctx
);
11284 offset
= extended
? offset
: offset
<< 3;
11285 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11288 check_insn(ctx
, ISA_MIPS3
);
11289 check_mips_64(ctx
);
11290 offset
= extended
? offset
: offset
<< 3;
11291 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11294 check_insn(ctx
, ISA_MIPS3
);
11295 check_mips_64(ctx
);
11296 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11297 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11300 check_insn(ctx
, ISA_MIPS3
);
11301 check_mips_64(ctx
);
11302 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11303 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11306 check_insn(ctx
, ISA_MIPS3
);
11307 check_mips_64(ctx
);
11308 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11309 generate_exception_end(ctx
, EXCP_RI
);
11311 offset
= extended
? offset
: offset
<< 3;
11312 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11316 check_insn(ctx
, ISA_MIPS3
);
11317 check_mips_64(ctx
);
11318 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11319 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11322 check_insn(ctx
, ISA_MIPS3
);
11323 check_mips_64(ctx
);
11324 offset
= extended
? offset
: offset
<< 2;
11325 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11328 check_insn(ctx
, ISA_MIPS3
);
11329 check_mips_64(ctx
);
11330 offset
= extended
? offset
: offset
<< 2;
11331 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11337 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11339 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11340 int op
, rx
, ry
, funct
, sa
;
11341 int16_t imm
, offset
;
11343 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11344 op
= (ctx
->opcode
>> 11) & 0x1f;
11345 sa
= (ctx
->opcode
>> 22) & 0x1f;
11346 funct
= (ctx
->opcode
>> 8) & 0x7;
11347 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11348 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11349 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11350 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11351 | (ctx
->opcode
& 0x1f));
11353 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11356 case M16_OPC_ADDIUSP
:
11357 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11359 case M16_OPC_ADDIUPC
:
11360 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11363 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11364 /* No delay slot, so just process as a normal instruction */
11367 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11368 /* No delay slot, so just process as a normal instruction */
11370 case M16_OPC_BNEQZ
:
11371 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11372 /* No delay slot, so just process as a normal instruction */
11374 case M16_OPC_SHIFT
:
11375 switch (ctx
->opcode
& 0x3) {
11377 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11380 #if defined(TARGET_MIPS64)
11381 check_mips_64(ctx
);
11382 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11384 generate_exception_end(ctx
, EXCP_RI
);
11388 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11391 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11395 #if defined(TARGET_MIPS64)
11397 check_insn(ctx
, ISA_MIPS3
);
11398 check_mips_64(ctx
);
11399 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11403 imm
= ctx
->opcode
& 0xf;
11404 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11405 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11406 imm
= (int16_t) (imm
<< 1) >> 1;
11407 if ((ctx
->opcode
>> 4) & 0x1) {
11408 #if defined(TARGET_MIPS64)
11409 check_mips_64(ctx
);
11410 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11412 generate_exception_end(ctx
, EXCP_RI
);
11415 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11418 case M16_OPC_ADDIU8
:
11419 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11422 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11424 case M16_OPC_SLTIU
:
11425 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11430 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11433 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11436 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11439 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11442 check_insn(ctx
, ISA_MIPS32
);
11444 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11445 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11446 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11447 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11448 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11449 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11450 | (ctx
->opcode
& 0xf)) << 3;
11452 if (ctx
->opcode
& (1 << 7)) {
11453 gen_mips16_save(ctx
, xsregs
, aregs
,
11454 do_ra
, do_s0
, do_s1
,
11457 gen_mips16_restore(ctx
, xsregs
, aregs
,
11458 do_ra
, do_s0
, do_s1
,
11464 generate_exception_end(ctx
, EXCP_RI
);
11469 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11472 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11474 #if defined(TARGET_MIPS64)
11476 check_insn(ctx
, ISA_MIPS3
);
11477 check_mips_64(ctx
);
11478 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11482 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11485 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11488 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11491 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11494 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11497 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11500 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11502 #if defined(TARGET_MIPS64)
11504 check_insn(ctx
, ISA_MIPS3
);
11505 check_mips_64(ctx
);
11506 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11510 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11513 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11516 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11519 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11521 #if defined(TARGET_MIPS64)
11523 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11527 generate_exception_end(ctx
, EXCP_RI
);
11534 static inline bool is_uhi(int sdbbp_code
)
11536 #ifdef CONFIG_USER_ONLY
11539 return semihosting_enabled() && sdbbp_code
== 1;
11543 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11547 int op
, cnvt_op
, op1
, offset
;
11551 op
= (ctx
->opcode
>> 11) & 0x1f;
11552 sa
= (ctx
->opcode
>> 2) & 0x7;
11553 sa
= sa
== 0 ? 8 : sa
;
11554 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11555 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11556 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11557 op1
= offset
= ctx
->opcode
& 0x1f;
11562 case M16_OPC_ADDIUSP
:
11564 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11566 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11569 case M16_OPC_ADDIUPC
:
11570 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11573 offset
= (ctx
->opcode
& 0x7ff) << 1;
11574 offset
= (int16_t)(offset
<< 4) >> 4;
11575 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11576 /* No delay slot, so just process as a normal instruction */
11579 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11580 offset
= (((ctx
->opcode
& 0x1f) << 21)
11581 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11583 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11584 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11588 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11589 ((int8_t)ctx
->opcode
) << 1, 0);
11590 /* No delay slot, so just process as a normal instruction */
11592 case M16_OPC_BNEQZ
:
11593 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11594 ((int8_t)ctx
->opcode
) << 1, 0);
11595 /* No delay slot, so just process as a normal instruction */
11597 case M16_OPC_SHIFT
:
11598 switch (ctx
->opcode
& 0x3) {
11600 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11603 #if defined(TARGET_MIPS64)
11604 check_insn(ctx
, ISA_MIPS3
);
11605 check_mips_64(ctx
);
11606 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11608 generate_exception_end(ctx
, EXCP_RI
);
11612 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11615 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11619 #if defined(TARGET_MIPS64)
11621 check_insn(ctx
, ISA_MIPS3
);
11622 check_mips_64(ctx
);
11623 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11628 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11630 if ((ctx
->opcode
>> 4) & 1) {
11631 #if defined(TARGET_MIPS64)
11632 check_insn(ctx
, ISA_MIPS3
);
11633 check_mips_64(ctx
);
11634 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11636 generate_exception_end(ctx
, EXCP_RI
);
11639 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11643 case M16_OPC_ADDIU8
:
11645 int16_t imm
= (int8_t) ctx
->opcode
;
11647 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11652 int16_t imm
= (uint8_t) ctx
->opcode
;
11653 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11656 case M16_OPC_SLTIU
:
11658 int16_t imm
= (uint8_t) ctx
->opcode
;
11659 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11666 funct
= (ctx
->opcode
>> 8) & 0x7;
11669 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11670 ((int8_t)ctx
->opcode
) << 1, 0);
11673 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11674 ((int8_t)ctx
->opcode
) << 1, 0);
11677 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11680 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11681 ((int8_t)ctx
->opcode
) << 3);
11684 check_insn(ctx
, ISA_MIPS32
);
11686 int do_ra
= ctx
->opcode
& (1 << 6);
11687 int do_s0
= ctx
->opcode
& (1 << 5);
11688 int do_s1
= ctx
->opcode
& (1 << 4);
11689 int framesize
= ctx
->opcode
& 0xf;
11691 if (framesize
== 0) {
11694 framesize
= framesize
<< 3;
11697 if (ctx
->opcode
& (1 << 7)) {
11698 gen_mips16_save(ctx
, 0, 0,
11699 do_ra
, do_s0
, do_s1
, framesize
);
11701 gen_mips16_restore(ctx
, 0, 0,
11702 do_ra
, do_s0
, do_s1
, framesize
);
11708 int rz
= xlat(ctx
->opcode
& 0x7);
11710 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11711 ((ctx
->opcode
>> 5) & 0x7);
11712 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11716 reg32
= ctx
->opcode
& 0x1f;
11717 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11720 generate_exception_end(ctx
, EXCP_RI
);
11727 int16_t imm
= (uint8_t) ctx
->opcode
;
11729 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11734 int16_t imm
= (uint8_t) ctx
->opcode
;
11735 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11738 #if defined(TARGET_MIPS64)
11740 check_insn(ctx
, ISA_MIPS3
);
11741 check_mips_64(ctx
);
11742 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11746 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11749 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11752 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11755 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11758 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11761 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11764 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11766 #if defined (TARGET_MIPS64)
11768 check_insn(ctx
, ISA_MIPS3
);
11769 check_mips_64(ctx
);
11770 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11774 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11777 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11780 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11783 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11787 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11790 switch (ctx
->opcode
& 0x3) {
11792 mips32_op
= OPC_ADDU
;
11795 mips32_op
= OPC_SUBU
;
11797 #if defined(TARGET_MIPS64)
11799 mips32_op
= OPC_DADDU
;
11800 check_insn(ctx
, ISA_MIPS3
);
11801 check_mips_64(ctx
);
11804 mips32_op
= OPC_DSUBU
;
11805 check_insn(ctx
, ISA_MIPS3
);
11806 check_mips_64(ctx
);
11810 generate_exception_end(ctx
, EXCP_RI
);
11814 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11823 int nd
= (ctx
->opcode
>> 7) & 0x1;
11824 int link
= (ctx
->opcode
>> 6) & 0x1;
11825 int ra
= (ctx
->opcode
>> 5) & 0x1;
11828 check_insn(ctx
, ISA_MIPS32
);
11837 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11842 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11843 gen_helper_do_semihosting(cpu_env
);
11845 /* XXX: not clear which exception should be raised
11846 * when in debug mode...
11848 check_insn(ctx
, ISA_MIPS32
);
11849 generate_exception_end(ctx
, EXCP_DBp
);
11853 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11856 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11859 generate_exception_end(ctx
, EXCP_BREAK
);
11862 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11865 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11868 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11870 #if defined (TARGET_MIPS64)
11872 check_insn(ctx
, ISA_MIPS3
);
11873 check_mips_64(ctx
);
11874 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11878 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11881 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11884 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11887 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11890 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11893 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11896 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11899 check_insn(ctx
, ISA_MIPS32
);
11901 case RR_RY_CNVT_ZEB
:
11902 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11904 case RR_RY_CNVT_ZEH
:
11905 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11907 case RR_RY_CNVT_SEB
:
11908 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11910 case RR_RY_CNVT_SEH
:
11911 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11913 #if defined (TARGET_MIPS64)
11914 case RR_RY_CNVT_ZEW
:
11915 check_insn(ctx
, ISA_MIPS64
);
11916 check_mips_64(ctx
);
11917 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11919 case RR_RY_CNVT_SEW
:
11920 check_insn(ctx
, ISA_MIPS64
);
11921 check_mips_64(ctx
);
11922 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11926 generate_exception_end(ctx
, EXCP_RI
);
11931 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11933 #if defined (TARGET_MIPS64)
11935 check_insn(ctx
, ISA_MIPS3
);
11936 check_mips_64(ctx
);
11937 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11940 check_insn(ctx
, ISA_MIPS3
);
11941 check_mips_64(ctx
);
11942 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11945 check_insn(ctx
, ISA_MIPS3
);
11946 check_mips_64(ctx
);
11947 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11950 check_insn(ctx
, ISA_MIPS3
);
11951 check_mips_64(ctx
);
11952 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11956 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11959 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11962 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11965 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11967 #if defined (TARGET_MIPS64)
11969 check_insn(ctx
, ISA_MIPS3
);
11970 check_mips_64(ctx
);
11971 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11974 check_insn(ctx
, ISA_MIPS3
);
11975 check_mips_64(ctx
);
11976 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11979 check_insn(ctx
, ISA_MIPS3
);
11980 check_mips_64(ctx
);
11981 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11984 check_insn(ctx
, ISA_MIPS3
);
11985 check_mips_64(ctx
);
11986 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11990 generate_exception_end(ctx
, EXCP_RI
);
11994 case M16_OPC_EXTEND
:
11995 decode_extended_mips16_opc(env
, ctx
);
11998 #if defined(TARGET_MIPS64)
12000 funct
= (ctx
->opcode
>> 8) & 0x7;
12001 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12005 generate_exception_end(ctx
, EXCP_RI
);
12012 /* microMIPS extension to MIPS32/MIPS64 */
12015 * microMIPS32/microMIPS64 major opcodes
12017 * 1. MIPS Architecture for Programmers Volume II-B:
12018 * The microMIPS32 Instruction Set (Revision 3.05)
12020 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12022 * 2. MIPS Architecture For Programmers Volume II-A:
12023 * The MIPS64 Instruction Set (Revision 3.51)
12053 POOL32S
= 0x16, /* MIPS64 */
12054 DADDIU32
= 0x17, /* MIPS64 */
12083 /* 0x29 is reserved */
12096 /* 0x31 is reserved */
12109 SD32
= 0x36, /* MIPS64 */
12110 LD32
= 0x37, /* MIPS64 */
12112 /* 0x39 is reserved */
12128 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12138 /* POOL32A encoding of minor opcode field */
12141 /* These opcodes are distinguished only by bits 9..6; those bits are
12142 * what are recorded below. */
12179 /* The following can be distinguished by their lower 6 bits. */
12189 /* POOL32AXF encoding of minor opcode field extension */
12192 * 1. MIPS Architecture for Programmers Volume II-B:
12193 * The microMIPS32 Instruction Set (Revision 3.05)
12195 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12197 * 2. MIPS Architecture for Programmers VolumeIV-e:
12198 * The MIPS DSP Application-Specific Extension
12199 * to the microMIPS32 Architecture (Revision 2.34)
12201 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12216 /* begin of microMIPS32 DSP */
12218 /* bits 13..12 for 0x01 */
12224 /* bits 13..12 for 0x2a */
12230 /* bits 13..12 for 0x32 */
12234 /* end of microMIPS32 DSP */
12236 /* bits 15..12 for 0x2c */
12253 /* bits 15..12 for 0x34 */
12261 /* bits 15..12 for 0x3c */
12263 JR
= 0x0, /* alias */
12271 /* bits 15..12 for 0x05 */
12275 /* bits 15..12 for 0x0d */
12287 /* bits 15..12 for 0x15 */
12293 /* bits 15..12 for 0x1d */
12297 /* bits 15..12 for 0x2d */
12302 /* bits 15..12 for 0x35 */
12309 /* POOL32B encoding of minor opcode field (bits 15..12) */
12325 /* POOL32C encoding of minor opcode field (bits 15..12) */
12333 /* 0xa is reserved */
12340 /* 0x6 is reserved */
12346 /* POOL32F encoding of minor opcode field (bits 5..0) */
12349 /* These are the bit 7..6 values */
12358 /* These are the bit 8..6 values */
12383 MOVZ_FMT_05
= 0x05,
12417 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12424 /* POOL32Fxf encoding of minor opcode extension field */
12462 /* POOL32I encoding of minor opcode field (bits 25..21) */
12492 /* These overlap and are distinguished by bit16 of the instruction */
12501 /* POOL16A encoding of minor opcode field */
12508 /* POOL16B encoding of minor opcode field */
12515 /* POOL16C encoding of minor opcode field */
12535 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12555 /* POOL16D encoding of minor opcode field */
12562 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
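/*
 * SIMM/ZIMM pull a 'width'-bit field starting at bit 'start' out of an
 * opcode; SIMM additionally sign-extends it by shifting up to bit 31 and
 * back down.  For example, with op = 0x001e, ZIMM(op, 1, 4) yields 15
 * while SIMM(op, 1, 4) yields -1.
 */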
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
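/*
 * ADDIUSP uses a folded 9-bit immediate: encodings 0 and 1 mean 256 and 257,
 * 2..255 are used as-is, 256..509 map to -256..-3 and the last two
 * encodings map to -258/-257; the decoded value is then scaled by 4 before
 * being added to $sp.  E.g. encoded 0 adds 1024 bytes, encoded 400 adds
 * (400 - 512) * 4 = -448 bytes.
 */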
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
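/*
 * ANDI16 does not carry a full immediate; the 4-bit encoded value indexes
 * the decoded_imm[] table above, so only common masks can be expressed:
 * powers of two (1, 2, 4, ..., 128, 32768) and powers of two minus one
 * (3, 7, 15, ..., 255, 65535).
 */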
12652 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12653 int base
, int16_t offset
)
12658 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12659 generate_exception_end(ctx
, EXCP_RI
);
12663 t0
= tcg_temp_new();
12665 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12667 t1
= tcg_const_tl(reglist
);
12668 t2
= tcg_const_i32(ctx
->mem_idx
);
12670 save_cpu_state(ctx
, 1);
12673 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12676 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12678 #ifdef TARGET_MIPS64
12680 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12683 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12689 tcg_temp_free_i32(t2
);
12693 static void gen_pool16c_insn(DisasContext
*ctx
)
12695 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12696 int rs
= mmreg(ctx
->opcode
& 0x7);
12698 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12703 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12709 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12715 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12721 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12728 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12729 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12731 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12740 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12741 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12743 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12750 int reg
= ctx
->opcode
& 0x1f;
12752 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12758 int reg
= ctx
->opcode
& 0x1f;
12759 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12760 /* Let normal delay slot handling in our caller take us
12761 to the branch target. */
12766 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12767 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12771 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12772 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12776 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12780 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12783 generate_exception_end(ctx
, EXCP_BREAK
);
12786 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12787 gen_helper_do_semihosting(cpu_env
);
12789 /* XXX: not clear which exception should be raised
12790 * when in debug mode...
12792 check_insn(ctx
, ISA_MIPS32
);
12793 generate_exception_end(ctx
, EXCP_DBp
);
12796 case JRADDIUSP
+ 0:
12797 case JRADDIUSP
+ 1:
12799 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12800 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12801 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12802 /* Let normal delay slot handling in our caller take us
12803 to the branch target. */
12807 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
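/*
 * MOVEP copies two GPRs in one 16-bit instruction.  The destination pair is
 * selected by enc_dest through rd_enc[]/re_enc[] (pairs drawn from the
 * argument registers plus $s5/$s6), and the two sources by enc_rs/enc_rt
 * through rs_rt_enc[], where source encoding 0 produces the constant zero
 * rather than a read of a register.
 */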
12835 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12837 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12838 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12840 switch (ctx
->opcode
& 0xf) {
12842 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12845 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12849 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12850 int offset
= extract32(ctx
->opcode
, 4, 4);
12851 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12854 case R6_JRC16
: /* JRCADDIUSP */
12855 if ((ctx
->opcode
>> 4) & 1) {
12857 int imm
= extract32(ctx
->opcode
, 5, 5);
12858 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12859 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12862 int rs
= extract32(ctx
->opcode
, 5, 5);
12863 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12866 case MOVEP
... MOVEP_07
:
12867 case MOVEP_0C
... MOVEP_0F
:
12869 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12870 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12871 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12872 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12876 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12879 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12883 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12884 int offset
= extract32(ctx
->opcode
, 4, 4);
12885 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12888 case JALRC16
: /* BREAK16, SDBBP16 */
12889 switch (ctx
->opcode
& 0x3f) {
12891 case JALRC16
+ 0x20:
12893 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12898 generate_exception(ctx
, EXCP_BREAK
);
12902 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12903 gen_helper_do_semihosting(cpu_env
);
12905 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12906 generate_exception(ctx
, EXCP_RI
);
12908 generate_exception(ctx
, EXCP_DBp
);
12915 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
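/*
 * LWXS rd, index(base): word load from base + (index << 2), i.e. an indexed
 * load with the index pre-scaled by the access size, as generated above
 * (an index register of 0 degenerates to a plain load from base).
 */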
12940 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12941 int base
, int16_t offset
)
12945 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12946 generate_exception_end(ctx
, EXCP_RI
);
12950 t0
= tcg_temp_new();
12951 t1
= tcg_temp_new();
12953 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12958 generate_exception_end(ctx
, EXCP_RI
);
12961 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12962 gen_store_gpr(t1
, rd
);
12963 tcg_gen_movi_tl(t1
, 4);
12964 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12965 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12966 gen_store_gpr(t1
, rd
+1);
12969 gen_load_gpr(t1
, rd
);
12970 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12971 tcg_gen_movi_tl(t1
, 4);
12972 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12973 gen_load_gpr(t1
, rd
+1);
12974 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12976 #ifdef TARGET_MIPS64
12979 generate_exception_end(ctx
, EXCP_RI
);
12982 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12983 gen_store_gpr(t1
, rd
);
12984 tcg_gen_movi_tl(t1
, 8);
12985 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12986 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12987 gen_store_gpr(t1
, rd
+1);
12990 gen_load_gpr(t1
, rd
);
12991 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12992 tcg_gen_movi_tl(t1
, 8);
12993 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12994 gen_load_gpr(t1
, rd
+1);
12995 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13003 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13005 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13006 int minor
= (ctx
->opcode
>> 12) & 0xf;
13007 uint32_t mips32_op
;
13009 switch (extension
) {
13011 mips32_op
= OPC_TEQ
;
13014 mips32_op
= OPC_TGE
;
13017 mips32_op
= OPC_TGEU
;
13020 mips32_op
= OPC_TLT
;
13023 mips32_op
= OPC_TLTU
;
13026 mips32_op
= OPC_TNE
;
13028 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13030 #ifndef CONFIG_USER_ONLY
13033 check_cp0_enabled(ctx
);
13035 /* Treat as NOP. */
13038 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13042 check_cp0_enabled(ctx
);
13044 TCGv t0
= tcg_temp_new();
13046 gen_load_gpr(t0
, rt
);
13047 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13053 switch (minor
& 3) {
13055 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13058 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13061 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13064 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13067 goto pool32axf_invalid
;
13071 switch (minor
& 3) {
13073 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13076 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13079 goto pool32axf_invalid
;
13085 check_insn(ctx
, ISA_MIPS32R6
);
13086 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13089 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13092 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13095 mips32_op
= OPC_CLO
;
13098 mips32_op
= OPC_CLZ
;
13100 check_insn(ctx
, ISA_MIPS32
);
13101 gen_cl(ctx
, mips32_op
, rt
, rs
);
13104 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13105 gen_rdhwr(ctx
, rt
, rs
, 0);
13108 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13111 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13112 mips32_op
= OPC_MULT
;
13115 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13116 mips32_op
= OPC_MULTU
;
13119 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13120 mips32_op
= OPC_DIV
;
13123 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13124 mips32_op
= OPC_DIVU
;
13127 check_insn(ctx
, ISA_MIPS32
);
13128 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13131 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13132 mips32_op
= OPC_MADD
;
13135 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13136 mips32_op
= OPC_MADDU
;
13139 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13140 mips32_op
= OPC_MSUB
;
13143 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13144 mips32_op
= OPC_MSUBU
;
13146 check_insn(ctx
, ISA_MIPS32
);
13147 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13150 goto pool32axf_invalid
;
13161 generate_exception_err(ctx
, EXCP_CpU
, 2);
13164 goto pool32axf_invalid
;
13169 case JALR
: /* JALRC */
13170 case JALR_HB
: /* JALRC_HB */
13171 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13172 /* JALRC, JALRC_HB */
13173 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13175 /* JALR, JALR_HB */
13176 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13177 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13182 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13183 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13184 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13187 goto pool32axf_invalid
;
13193 check_cp0_enabled(ctx
);
13194 check_insn(ctx
, ISA_MIPS32R2
);
13195 gen_load_srsgpr(rs
, rt
);
13198 check_cp0_enabled(ctx
);
13199 check_insn(ctx
, ISA_MIPS32R2
);
13200 gen_store_srsgpr(rs
, rt
);
13203 goto pool32axf_invalid
;
13206 #ifndef CONFIG_USER_ONLY
13210 mips32_op
= OPC_TLBP
;
13213 mips32_op
= OPC_TLBR
;
13216 mips32_op
= OPC_TLBWI
;
13219 mips32_op
= OPC_TLBWR
;
13222 mips32_op
= OPC_TLBINV
;
13225 mips32_op
= OPC_TLBINVF
;
13228 mips32_op
= OPC_WAIT
;
13231 mips32_op
= OPC_DERET
;
13234 mips32_op
= OPC_ERET
;
13236 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13239 goto pool32axf_invalid
;
13245 check_cp0_enabled(ctx
);
13247 TCGv t0
= tcg_temp_new();
13249 save_cpu_state(ctx
, 1);
13250 gen_helper_di(t0
, cpu_env
);
13251 gen_store_gpr(t0
, rs
);
13252 /* Stop translation as we may have switched the execution mode */
13253 ctx
->bstate
= BS_STOP
;
13258 check_cp0_enabled(ctx
);
13260 TCGv t0
= tcg_temp_new();
13262 save_cpu_state(ctx
, 1);
13263 gen_helper_ei(t0
, cpu_env
);
13264 gen_store_gpr(t0
, rs
);
13265 /* Stop translation as we may have switched the execution mode */
13266 ctx
->bstate
= BS_STOP
;
13271 goto pool32axf_invalid
;
13281 generate_exception_end(ctx
, EXCP_SYSCALL
);
13284 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13285 gen_helper_do_semihosting(cpu_env
);
13287 check_insn(ctx
, ISA_MIPS32
);
13288 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13289 generate_exception_end(ctx
, EXCP_RI
);
13291 generate_exception_end(ctx
, EXCP_DBp
);
13296 goto pool32axf_invalid
;
13300 switch (minor
& 3) {
13302 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13305 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13308 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13311 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13314 goto pool32axf_invalid
;
13318 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13321 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13324 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13327 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13330 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13333 goto pool32axf_invalid
;
13338 MIPS_INVAL("pool32axf");
13339 generate_exception_end(ctx
, EXCP_RI
);
13344 /* Values for microMIPS fmt field. Variable-width, depending on which
13345 formats the instruction supports. */
13364 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13366 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13367 uint32_t mips32_op
;
13369 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13370 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13371 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13373 switch (extension
) {
13374 case FLOAT_1BIT_FMT(CFC1
, 0):
13375 mips32_op
= OPC_CFC1
;
13377 case FLOAT_1BIT_FMT(CTC1
, 0):
13378 mips32_op
= OPC_CTC1
;
13380 case FLOAT_1BIT_FMT(MFC1
, 0):
13381 mips32_op
= OPC_MFC1
;
13383 case FLOAT_1BIT_FMT(MTC1
, 0):
13384 mips32_op
= OPC_MTC1
;
13386 case FLOAT_1BIT_FMT(MFHC1
, 0):
13387 mips32_op
= OPC_MFHC1
;
13389 case FLOAT_1BIT_FMT(MTHC1
, 0):
13390 mips32_op
= OPC_MTHC1
;
13392 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13395 /* Reciprocal square root */
13396 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13397 mips32_op
= OPC_RSQRT_S
;
13399 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13400 mips32_op
= OPC_RSQRT_D
;
13404 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13405 mips32_op
= OPC_SQRT_S
;
13407 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13408 mips32_op
= OPC_SQRT_D
;
13412 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13413 mips32_op
= OPC_RECIP_S
;
13415 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13416 mips32_op
= OPC_RECIP_D
;
13420 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13421 mips32_op
= OPC_FLOOR_L_S
;
13423 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13424 mips32_op
= OPC_FLOOR_L_D
;
13426 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13427 mips32_op
= OPC_FLOOR_W_S
;
13429 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13430 mips32_op
= OPC_FLOOR_W_D
;
13434 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13435 mips32_op
= OPC_CEIL_L_S
;
13437 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13438 mips32_op
= OPC_CEIL_L_D
;
13440 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13441 mips32_op
= OPC_CEIL_W_S
;
13443 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13444 mips32_op
= OPC_CEIL_W_D
;
13448 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13449 mips32_op
= OPC_TRUNC_L_S
;
13451 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13452 mips32_op
= OPC_TRUNC_L_D
;
13454 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13455 mips32_op
= OPC_TRUNC_W_S
;
13457 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13458 mips32_op
= OPC_TRUNC_W_D
;
13462 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13463 mips32_op
= OPC_ROUND_L_S
;
13465 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13466 mips32_op
= OPC_ROUND_L_D
;
13468 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13469 mips32_op
= OPC_ROUND_W_S
;
13471 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13472 mips32_op
= OPC_ROUND_W_D
;
13475 /* Integer to floating-point conversion */
13476 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13477 mips32_op
= OPC_CVT_L_S
;
13479 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13480 mips32_op
= OPC_CVT_L_D
;
13482 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13483 mips32_op
= OPC_CVT_W_S
;
13485 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13486 mips32_op
= OPC_CVT_W_D
;
13489 /* Paired-foo conversions */
13490 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13491 mips32_op
= OPC_CVT_S_PL
;
13493 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13494 mips32_op
= OPC_CVT_S_PU
;
13496 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13497 mips32_op
= OPC_CVT_PW_PS
;
13499 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13500 mips32_op
= OPC_CVT_PS_PW
;
13503 /* Floating-point moves */
13504 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13505 mips32_op
= OPC_MOV_S
;
13507 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13508 mips32_op
= OPC_MOV_D
;
13510 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13511 mips32_op
= OPC_MOV_PS
;
13514 /* Absolute value */
13515 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13516 mips32_op
= OPC_ABS_S
;
13518 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13519 mips32_op
= OPC_ABS_D
;
13521 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13522 mips32_op
= OPC_ABS_PS
;
13526 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13527 mips32_op
= OPC_NEG_S
;
13529 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13530 mips32_op
= OPC_NEG_D
;
13532 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13533 mips32_op
= OPC_NEG_PS
;
13536 /* Reciprocal square root step */
13537 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13538 mips32_op
= OPC_RSQRT1_S
;
13540 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13541 mips32_op
= OPC_RSQRT1_D
;
13543 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13544 mips32_op
= OPC_RSQRT1_PS
;
13547 /* Reciprocal step */
13548 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13549 mips32_op
= OPC_RECIP1_S
;
13551 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13552 mips32_op
= OPC_RECIP1_S
;
13554 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13555 mips32_op
= OPC_RECIP1_PS
;
13558 /* Conversions from double */
13559 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13560 mips32_op
= OPC_CVT_D_S
;
13562 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13563 mips32_op
= OPC_CVT_D_W
;
13565 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13566 mips32_op
= OPC_CVT_D_L
;
13569 /* Conversions from single */
13570 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13571 mips32_op
= OPC_CVT_S_D
;
13573 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13574 mips32_op
= OPC_CVT_S_W
;
13576 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13577 mips32_op
= OPC_CVT_S_L
;
13579 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13582 /* Conditional moves on floating-point codes */
13583 case COND_FLOAT_MOV(MOVT
, 0):
13584 case COND_FLOAT_MOV(MOVT
, 1):
13585 case COND_FLOAT_MOV(MOVT
, 2):
13586 case COND_FLOAT_MOV(MOVT
, 3):
13587 case COND_FLOAT_MOV(MOVT
, 4):
13588 case COND_FLOAT_MOV(MOVT
, 5):
13589 case COND_FLOAT_MOV(MOVT
, 6):
13590 case COND_FLOAT_MOV(MOVT
, 7):
13591 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13592 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13594 case COND_FLOAT_MOV(MOVF
, 0):
13595 case COND_FLOAT_MOV(MOVF
, 1):
13596 case COND_FLOAT_MOV(MOVF
, 2):
13597 case COND_FLOAT_MOV(MOVF
, 3):
13598 case COND_FLOAT_MOV(MOVF
, 4):
13599 case COND_FLOAT_MOV(MOVF
, 5):
13600 case COND_FLOAT_MOV(MOVF
, 6):
13601 case COND_FLOAT_MOV(MOVF
, 7):
13602 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13603 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13606 MIPS_INVAL("pool32fxf");
13607 generate_exception_end(ctx
, EXCP_RI
);
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
        minor = ctx->opcode & 0x3f;
            minor = (ctx->opcode >> 6) & 0xf;
                mips32_op = OPC_SLL;
                mips32_op = OPC_SRA;
                mips32_op = OPC_SRL;
                mips32_op = OPC_ROTR;
                gen_shift_imm(ctx, mips32_op, rt, rs, rd);
                check_insn(ctx, ISA_MIPS32R6);
                gen_cond_move(ctx, OPC_SELEQZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_cond_move(ctx, OPC_SELNEZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
                goto pool32a_invalid;
            minor = (ctx->opcode >> 6) & 0xf;
                mips32_op = OPC_ADD;
                mips32_op = OPC_ADDU;
                mips32_op = OPC_SUB;
                mips32_op = OPC_SUBU;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MUL;
                gen_arith(ctx, mips32_op, rd, rs, rt);
                mips32_op = OPC_SLLV;
                mips32_op = OPC_SRLV;
                mips32_op = OPC_SRAV;
                mips32_op = OPC_ROTRV;
                gen_shift(ctx, mips32_op, rd, rs, rt);
            /* Logical operations */
                mips32_op = OPC_AND;
                mips32_op = OPC_OR;
                mips32_op = OPC_NOR;
                mips32_op = OPC_XOR;
                gen_logic(ctx, mips32_op, rd, rs, rt);
            /* Set less than */
                mips32_op = OPC_SLT;
                mips32_op = OPC_SLTU;
                gen_slt(ctx, mips32_op, rd, rs, rt);
                goto pool32a_invalid;
            minor = (ctx->opcode >> 6) & 0xf;
            /* Conditional moves */
            case MOVN: /* MUL */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
                    gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
            case MOVZ: /* MUH */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
                    gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
            case LWXS: /* DIV */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
                    gen_ldxs(ctx, rs, rt, rd);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
                goto pool32a_invalid;
            gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_lsa(ctx, OPC_LSA, rd, rs, rt,
                    extract32(ctx->opcode, 9, 2));
            check_insn(ctx, ISA_MIPS32R6);
            gen_align(ctx, OPC_ALIGN, rd, rs, rt,
                      extract32(ctx->opcode, 9, 2));
            gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
            gen_pool32axf(env, ctx, rt, rs);
            generate_exception_end(ctx, EXCP_BREAK);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
        pool32a_invalid:
        default:
            MIPS_INVAL("pool32a");
            generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
            check_cp0_enabled(ctx);
            if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
                gen_cache_operation(ctx, rt, rs, imm);
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
            MIPS_INVAL("pool32b");
            generate_exception_end(ctx, EXCP_RI);
13864 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13865 minor
= ctx
->opcode
& 0x3f;
13866 check_cp1_enabled(ctx
);
13869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13870 mips32_op
= OPC_ALNV_PS
;
13873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13874 mips32_op
= OPC_MADD_S
;
13877 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13878 mips32_op
= OPC_MADD_D
;
13881 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13882 mips32_op
= OPC_MADD_PS
;
13885 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13886 mips32_op
= OPC_MSUB_S
;
13889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13890 mips32_op
= OPC_MSUB_D
;
13893 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13894 mips32_op
= OPC_MSUB_PS
;
13897 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13898 mips32_op
= OPC_NMADD_S
;
13901 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13902 mips32_op
= OPC_NMADD_D
;
13905 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13906 mips32_op
= OPC_NMADD_PS
;
13909 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13910 mips32_op
= OPC_NMSUB_S
;
13913 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13914 mips32_op
= OPC_NMSUB_D
;
13917 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13918 mips32_op
= OPC_NMSUB_PS
;
13920 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13922 case CABS_COND_FMT
:
13923 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13924 cond
= (ctx
->opcode
>> 6) & 0xf;
13925 cc
= (ctx
->opcode
>> 13) & 0x7;
13926 fmt
= (ctx
->opcode
>> 10) & 0x3;
13929 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13932 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13935 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13938 goto pool32f_invalid
;
13942 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13943 cond
= (ctx
->opcode
>> 6) & 0xf;
13944 cc
= (ctx
->opcode
>> 13) & 0x7;
13945 fmt
= (ctx
->opcode
>> 10) & 0x3;
13948 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13951 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13954 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13957 goto pool32f_invalid
;
13961 check_insn(ctx
, ISA_MIPS32R6
);
13962 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13965 check_insn(ctx
, ISA_MIPS32R6
);
13966 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13969 gen_pool32fxf(ctx
, rt
, rs
);
13973 switch ((ctx
->opcode
>> 6) & 0x7) {
13975 mips32_op
= OPC_PLL_PS
;
13978 mips32_op
= OPC_PLU_PS
;
13981 mips32_op
= OPC_PUL_PS
;
13984 mips32_op
= OPC_PUU_PS
;
13987 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13988 mips32_op
= OPC_CVT_PS_S
;
13990 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13993 goto pool32f_invalid
;
13997 check_insn(ctx
, ISA_MIPS32R6
);
13998 switch ((ctx
->opcode
>> 9) & 0x3) {
14000 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14003 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14006 goto pool32f_invalid
;
14011 switch ((ctx
->opcode
>> 6) & 0x7) {
14013 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14014 mips32_op
= OPC_LWXC1
;
14017 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14018 mips32_op
= OPC_SWXC1
;
14021 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14022 mips32_op
= OPC_LDXC1
;
14025 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14026 mips32_op
= OPC_SDXC1
;
14029 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14030 mips32_op
= OPC_LUXC1
;
14033 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14034 mips32_op
= OPC_SUXC1
;
14036 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14039 goto pool32f_invalid
;
14043 check_insn(ctx
, ISA_MIPS32R6
);
14044 switch ((ctx
->opcode
>> 9) & 0x3) {
14046 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14049 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14052 goto pool32f_invalid
;
14057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14058 fmt
= (ctx
->opcode
>> 9) & 0x3;
14059 switch ((ctx
->opcode
>> 6) & 0x7) {
14063 mips32_op
= OPC_RSQRT2_S
;
14066 mips32_op
= OPC_RSQRT2_D
;
14069 mips32_op
= OPC_RSQRT2_PS
;
14072 goto pool32f_invalid
;
14078 mips32_op
= OPC_RECIP2_S
;
14081 mips32_op
= OPC_RECIP2_D
;
14084 mips32_op
= OPC_RECIP2_PS
;
14087 goto pool32f_invalid
;
14091 mips32_op
= OPC_ADDR_PS
;
14094 mips32_op
= OPC_MULR_PS
;
14096 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14099 goto pool32f_invalid
;
14103 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14104 cc
= (ctx
->opcode
>> 13) & 0x7;
14105 fmt
= (ctx
->opcode
>> 9) & 0x3;
14106 switch ((ctx
->opcode
>> 6) & 0x7) {
14107 case MOVF_FMT
: /* RINT_FMT */
14108 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14112 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14115 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14118 goto pool32f_invalid
;
14124 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14127 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14131 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14134 goto pool32f_invalid
;
14138 case MOVT_FMT
: /* CLASS_FMT */
14139 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14143 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14146 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14149 goto pool32f_invalid
;
14155 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14158 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14162 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14165 goto pool32f_invalid
;
14170 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14173 goto pool32f_invalid
;
14176 #define FINSN_3ARG_SDPS(prfx) \
14177 switch ((ctx->opcode >> 8) & 0x3) { \
14179 mips32_op = OPC_##prfx##_S; \
14182 mips32_op = OPC_##prfx##_D; \
14184 case FMT_SDPS_PS: \
14186 mips32_op = OPC_##prfx##_PS; \
14189 goto pool32f_invalid; \
14192 check_insn(ctx
, ISA_MIPS32R6
);
14193 switch ((ctx
->opcode
>> 9) & 0x3) {
14195 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14198 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14201 goto pool32f_invalid
;
14205 check_insn(ctx
, ISA_MIPS32R6
);
14206 switch ((ctx
->opcode
>> 9) & 0x3) {
14208 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14211 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14214 goto pool32f_invalid
;
14218 /* regular FP ops */
14219 switch ((ctx
->opcode
>> 6) & 0x3) {
14221 FINSN_3ARG_SDPS(ADD
);
14224 FINSN_3ARG_SDPS(SUB
);
14227 FINSN_3ARG_SDPS(MUL
);
14230 fmt
= (ctx
->opcode
>> 8) & 0x3;
14232 mips32_op
= OPC_DIV_D
;
14233 } else if (fmt
== 0) {
14234 mips32_op
= OPC_DIV_S
;
14236 goto pool32f_invalid
;
14240 goto pool32f_invalid
;
14245 switch ((ctx
->opcode
>> 6) & 0x7) {
14246 case MOVN_FMT
: /* SELNEZ_FMT */
14247 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14249 switch ((ctx
->opcode
>> 9) & 0x3) {
14251 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14254 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14257 goto pool32f_invalid
;
14261 FINSN_3ARG_SDPS(MOVN
);
14265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14266 FINSN_3ARG_SDPS(MOVN
);
14268 case MOVZ_FMT
: /* SELEQZ_FMT */
14269 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14271 switch ((ctx
->opcode
>> 9) & 0x3) {
14273 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14276 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14279 goto pool32f_invalid
;
14283 FINSN_3ARG_SDPS(MOVZ
);
14287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14288 FINSN_3ARG_SDPS(MOVZ
);
14291 check_insn(ctx
, ISA_MIPS32R6
);
14292 switch ((ctx
->opcode
>> 9) & 0x3) {
14294 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14297 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14300 goto pool32f_invalid
;
14304 check_insn(ctx
, ISA_MIPS32R6
);
14305 switch ((ctx
->opcode
>> 9) & 0x3) {
14307 mips32_op
= OPC_MADDF_S
;
14310 mips32_op
= OPC_MADDF_D
;
14313 goto pool32f_invalid
;
14317 check_insn(ctx
, ISA_MIPS32R6
);
14318 switch ((ctx
->opcode
>> 9) & 0x3) {
14320 mips32_op
= OPC_MSUBF_S
;
14323 mips32_op
= OPC_MSUBF_D
;
14326 goto pool32f_invalid
;
14330 goto pool32f_invalid
;
14334 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14338 MIPS_INVAL("pool32f");
14339 generate_exception_end(ctx
, EXCP_RI
);
14343 generate_exception_err(ctx
, EXCP_CpU
, 1);
        minor = (ctx->opcode >> 21) & 0x1f;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BLTZ, 4, rs, -1, imm << 1, 4);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 4);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 2);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BGEZ, 4, rs, -1, imm << 1, 4);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 4);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 2);
            ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BLEZ, 4, rs, -1, imm << 1, 4);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, OPC_BGTZ, 4, rs, -1, imm << 1, 4);
14387 case TLTI
: /* BC1EQZC */
14388 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14390 check_cp1_enabled(ctx
);
14391 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14394 mips32_op
= OPC_TLTI
;
14398 case TGEI
: /* BC1NEZC */
14399 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14401 check_cp1_enabled(ctx
);
14402 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14405 mips32_op
= OPC_TGEI
;
14410 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14411 mips32_op
= OPC_TLTIU
;
14414 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14415 mips32_op
= OPC_TGEIU
;
14417 case TNEI
: /* SYNCI */
14418 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14420 /* Break the TB to be able to sync copied instructions
14422 ctx
->bstate
= BS_STOP
;
14425 mips32_op
= OPC_TNEI
;
14430 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14431 mips32_op
= OPC_TEQI
;
14433 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14438 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14439 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14440 4, rs
, 0, imm
<< 1, 0);
14441 /* Compact branches don't have a delay slot, so just let
14442 the normal delay slot handling take us to the branch
14446 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14447 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14450 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14451 /* Break the TB to be able to sync copied instructions
14453 ctx
->bstate
= BS_STOP
;
14457 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14458 /* COP2: Not implemented. */
14459 generate_exception_err(ctx
, EXCP_CpU
, 2);
14462 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14463 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14466 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14467 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14470 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14471 mips32_op
= OPC_BC1FANY4
;
14474 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14475 mips32_op
= OPC_BC1TANY4
;
14478 check_insn(ctx
, ASE_MIPS3D
);
14481 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14482 check_cp1_enabled(ctx
);
14483 gen_compute_branch1(ctx
, mips32_op
,
14484 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14486 generate_exception_err(ctx
, EXCP_CpU
, 1);
14491 /* MIPS DSP: not implemented */
14494 MIPS_INVAL("pool32i");
14495 generate_exception_end(ctx
, EXCP_RI
);
14500 minor
= (ctx
->opcode
>> 12) & 0xf;
14501 offset
= sextract32(ctx
->opcode
, 0,
14502 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14505 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14506 mips32_op
= OPC_LWL
;
14509 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14510 mips32_op
= OPC_SWL
;
14513 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14514 mips32_op
= OPC_LWR
;
14517 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14518 mips32_op
= OPC_SWR
;
14520 #if defined(TARGET_MIPS64)
14522 check_insn(ctx
, ISA_MIPS3
);
14523 check_mips_64(ctx
);
14524 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14525 mips32_op
= OPC_LDL
;
14528 check_insn(ctx
, ISA_MIPS3
);
14529 check_mips_64(ctx
);
14530 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14531 mips32_op
= OPC_SDL
;
14534 check_insn(ctx
, ISA_MIPS3
);
14535 check_mips_64(ctx
);
14536 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14537 mips32_op
= OPC_LDR
;
14540 check_insn(ctx
, ISA_MIPS3
);
14541 check_mips_64(ctx
);
14542 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14543 mips32_op
= OPC_SDR
;
14546 check_insn(ctx
, ISA_MIPS3
);
14547 check_mips_64(ctx
);
14548 mips32_op
= OPC_LWU
;
14551 check_insn(ctx
, ISA_MIPS3
);
14552 check_mips_64(ctx
);
14553 mips32_op
= OPC_LLD
;
14557 mips32_op
= OPC_LL
;
14560 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14563 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14566 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14568 #if defined(TARGET_MIPS64)
14570 check_insn(ctx
, ISA_MIPS3
);
14571 check_mips_64(ctx
);
14572 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14576 /* Treat as no-op */
14577 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14578 /* hint codes 24-31 are reserved and signal RI */
14579 generate_exception(ctx
, EXCP_RI
);
14583 MIPS_INVAL("pool32c");
14584 generate_exception_end(ctx
, EXCP_RI
);
14588 case ADDI32
: /* AUI, LUI */
14589 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14591 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14594 mips32_op
= OPC_ADDI
;
14599 mips32_op
= OPC_ADDIU
;
14601 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14604 /* Logical operations */
14606 mips32_op
= OPC_ORI
;
14609 mips32_op
= OPC_XORI
;
14612 mips32_op
= OPC_ANDI
;
14614 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14617 /* Set less than immediate */
14619 mips32_op
= OPC_SLTI
;
14622 mips32_op
= OPC_SLTIU
;
14624 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14627 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14628 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14629 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14630 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14632 case JALS32
: /* BOVC, BEQC, BEQZALC */
14633 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14636 mips32_op
= OPC_BOVC
;
14637 } else if (rs
< rt
&& rs
== 0) {
14639 mips32_op
= OPC_BEQZALC
;
14642 mips32_op
= OPC_BEQC
;
14644 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14647 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14648 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14649 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14652 case BEQ32
: /* BC */
14653 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14655 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14656 sextract32(ctx
->opcode
<< 1, 0, 27));
14659 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14662 case BNE32
: /* BALC */
14663 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14665 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14666 sextract32(ctx
->opcode
<< 1, 0, 27));
14669 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14672 case J32
: /* BGTZC, BLTZC, BLTC */
14673 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14674 if (rs
== 0 && rt
!= 0) {
14676 mips32_op
= OPC_BGTZC
;
14677 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14679 mips32_op
= OPC_BLTZC
;
14682 mips32_op
= OPC_BLTC
;
14684 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14687 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14688 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14691 case JAL32
: /* BLEZC, BGEZC, BGEC */
14692 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14693 if (rs
== 0 && rt
!= 0) {
14695 mips32_op
= OPC_BLEZC
;
14696 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14698 mips32_op
= OPC_BGEZC
;
14701 mips32_op
= OPC_BGEC
;
14703 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14706 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14707 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14708 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14711 /* Floating point (COP1) */
14713 mips32_op
= OPC_LWC1
;
14716 mips32_op
= OPC_LDC1
;
14719 mips32_op
= OPC_SWC1
;
14722 mips32_op
= OPC_SDC1
;
14724 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14726 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14727 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14728 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14729 switch ((ctx
->opcode
>> 16) & 0x1f) {
14730 case ADDIUPC_00
... ADDIUPC_07
:
14731 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14734 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14737 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14739 case LWPC_08
... LWPC_0F
:
14740 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14743 generate_exception(ctx
, EXCP_RI
);
14748 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14749 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14751 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14754 case BNVC
: /* BNEC, BNEZALC */
14755 check_insn(ctx
, ISA_MIPS32R6
);
14758 mips32_op
= OPC_BNVC
;
14759 } else if (rs
< rt
&& rs
== 0) {
14761 mips32_op
= OPC_BNEZALC
;
14764 mips32_op
= OPC_BNEC
;
14766 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14768 case R6_BNEZC
: /* JIALC */
14769 check_insn(ctx
, ISA_MIPS32R6
);
14772 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14773 sextract32(ctx
->opcode
<< 1, 0, 22));
14776 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14779 case R6_BEQZC
: /* JIC */
14780 check_insn(ctx
, ISA_MIPS32R6
);
14783 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14784 sextract32(ctx
->opcode
<< 1, 0, 22));
14787 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14790 case BLEZALC
: /* BGEZALC, BGEUC */
14791 check_insn(ctx
, ISA_MIPS32R6
);
14792 if (rs
== 0 && rt
!= 0) {
14794 mips32_op
= OPC_BLEZALC
;
14795 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14797 mips32_op
= OPC_BGEZALC
;
14800 mips32_op
= OPC_BGEUC
;
14802 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14804 case BGTZALC
: /* BLTZALC, BLTUC */
14805 check_insn(ctx
, ISA_MIPS32R6
);
14806 if (rs
== 0 && rt
!= 0) {
14808 mips32_op
= OPC_BGTZALC
;
14809 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14811 mips32_op
= OPC_BLTZALC
;
14814 mips32_op
= OPC_BLTUC
;
14816 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14818 /* Loads and stores */
14820 mips32_op
= OPC_LB
;
14823 mips32_op
= OPC_LBU
;
14826 mips32_op
= OPC_LH
;
14829 mips32_op
= OPC_LHU
;
14832 mips32_op
= OPC_LW
;
14834 #ifdef TARGET_MIPS64
14836 check_insn(ctx
, ISA_MIPS3
);
14837 check_mips_64(ctx
);
14838 mips32_op
= OPC_LD
;
14841 check_insn(ctx
, ISA_MIPS3
);
14842 check_mips_64(ctx
);
14843 mips32_op
= OPC_SD
;
14847 mips32_op
= OPC_SB
;
14850 mips32_op
= OPC_SH
;
14853 mips32_op
= OPC_SW
;
14856 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14859 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14862 generate_exception_end(ctx
, EXCP_RI
);
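/* Top-level microMIPS decoder: raise AdEL for a halfword-misaligned PC,
 * enforce the required instruction size in a strict branch delay slot,
 * handle the 16-bit encodings here and defer the 32-bit encodings to
 * decode_micromips32_opc(). */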
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
            /* POOL32A, POOL32B, POOL32I, POOL32C */
            /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
            /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
            /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
            /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
            /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
            /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
            /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
14914 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14915 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14916 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14919 switch (ctx
->opcode
& 0x1) {
14927 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14928 /* In the Release 6 the register number location in
14929 * the instruction encoding has changed.
14931 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14933 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14939 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14940 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14941 int amount
= (ctx
->opcode
>> 1) & 0x7;
14943 amount
= amount
== 0 ? 8 : amount
;
14945 switch (ctx
->opcode
& 0x1) {
14954 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14958 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14959 gen_pool16c_r6_insn(ctx
);
14961 gen_pool16c_insn(ctx
);
14966 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14967 int rb
= 28; /* GP */
14968 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14970 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14974 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14975 if (ctx
->opcode
& 1) {
14976 generate_exception_end(ctx
, EXCP_RI
);
14979 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14980 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14981 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14982 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14987 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14988 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14989 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14990 offset
= (offset
== 0xf ? -1 : offset
);
14992 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14997 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14998 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14999 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15001 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15006 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15007 int rb
= 29; /* SP */
15008 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15010 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15015 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15016 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15017 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15019 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15024 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15025 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15026 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15028 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15033 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15034 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15035 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15037 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15042 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15043 int rb
= 29; /* SP */
15044 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15046 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15051 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15052 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15053 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15055 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15060 int rd
= uMIPS_RD5(ctx
->opcode
);
15061 int rs
= uMIPS_RS5(ctx
->opcode
);
15063 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15070 switch (ctx
->opcode
& 0x1) {
15080 switch (ctx
->opcode
& 0x1) {
15085 gen_addiur1sp(ctx
);
15089 case B16
: /* BC16 */
15090 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15091 sextract32(ctx
->opcode
, 0, 10) << 1,
15092 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15094 case BNEZ16
: /* BNEZC16 */
15095 case BEQZ16
: /* BEQZC16 */
15096 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15097 mmreg(uMIPS_RD(ctx
->opcode
)),
15098 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15099 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15104 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15105 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15107 imm
= (imm
== 0x7f ? -1 : imm
);
15108 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15114 generate_exception_end(ctx
, EXCP_RI
);
15117 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
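/* Indexed DSP loads: the effective address is GPR[base] + GPR[offset]
 * (either register is treated as zero when its number is 0), and the
 * byte, halfword, word or, on MIPS64, doubleword that is loaded is
 * written back to GPR[rd]. */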
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);

        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
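/* DSP arithmetic instructions (add/subtract, saturating forms, absolute
 * value and the precision-expanding/reducing ops).  A write to $zero is
 * treated as a NOP; everything else is delegated to per-instruction
 * helpers. */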
15172 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15173 int ret
, int v1
, int v2
)
15179 /* Treat as NOP. */
15183 v1_t
= tcg_temp_new();
15184 v2_t
= tcg_temp_new();
15186 gen_load_gpr(v1_t
, v1
);
15187 gen_load_gpr(v2_t
, v2
);
    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
15191 case OPC_MULT_G_2E
:
15195 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15197 case OPC_ADDUH_R_QB
:
15198 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15201 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15203 case OPC_ADDQH_R_PH
:
15204 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15207 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15209 case OPC_ADDQH_R_W
:
15210 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15213 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15215 case OPC_SUBUH_R_QB
:
15216 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15219 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15221 case OPC_SUBQH_R_PH
:
15222 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15225 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15227 case OPC_SUBQH_R_W
:
15228 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15232 case OPC_ABSQ_S_PH_DSP
:
15234 case OPC_ABSQ_S_QB
:
15236 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15238 case OPC_ABSQ_S_PH
:
15240 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15244 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15246 case OPC_PRECEQ_W_PHL
:
15248 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15249 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15251 case OPC_PRECEQ_W_PHR
:
15253 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15254 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15255 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15257 case OPC_PRECEQU_PH_QBL
:
15259 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15261 case OPC_PRECEQU_PH_QBR
:
15263 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15265 case OPC_PRECEQU_PH_QBLA
:
15267 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15269 case OPC_PRECEQU_PH_QBRA
:
15271 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15273 case OPC_PRECEU_PH_QBL
:
15275 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15277 case OPC_PRECEU_PH_QBR
:
15279 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15281 case OPC_PRECEU_PH_QBLA
:
15283 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15285 case OPC_PRECEU_PH_QBRA
:
15287 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15291 case OPC_ADDU_QB_DSP
:
15295 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15297 case OPC_ADDQ_S_PH
:
15299 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15303 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15307 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15309 case OPC_ADDU_S_QB
:
15311 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15315 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15317 case OPC_ADDU_S_PH
:
15319 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15323 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15325 case OPC_SUBQ_S_PH
:
15327 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15331 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15335 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15337 case OPC_SUBU_S_QB
:
15339 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15343 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15345 case OPC_SUBU_S_PH
:
15347 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15351 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15355 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15359 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15361 case OPC_RADDU_W_QB
:
15363 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15367 case OPC_CMPU_EQ_QB_DSP
:
15369 case OPC_PRECR_QB_PH
:
15371 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15373 case OPC_PRECRQ_QB_PH
:
15375 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15377 case OPC_PRECR_SRA_PH_W
:
15380 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15381 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15383 tcg_temp_free_i32(sa_t
);
15386 case OPC_PRECR_SRA_R_PH_W
:
15389 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15390 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15392 tcg_temp_free_i32(sa_t
);
15395 case OPC_PRECRQ_PH_W
:
15397 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15399 case OPC_PRECRQ_RS_PH_W
:
15401 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15403 case OPC_PRECRQU_S_QB_PH
:
15405 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15409 #ifdef TARGET_MIPS64
15410 case OPC_ABSQ_S_QH_DSP
:
15412 case OPC_PRECEQ_L_PWL
:
15414 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15416 case OPC_PRECEQ_L_PWR
:
15418 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15420 case OPC_PRECEQ_PW_QHL
:
15422 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15424 case OPC_PRECEQ_PW_QHR
:
15426 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15428 case OPC_PRECEQ_PW_QHLA
:
15430 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15432 case OPC_PRECEQ_PW_QHRA
:
15434 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15436 case OPC_PRECEQU_QH_OBL
:
15438 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15440 case OPC_PRECEQU_QH_OBR
:
15442 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15444 case OPC_PRECEQU_QH_OBLA
:
15446 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15448 case OPC_PRECEQU_QH_OBRA
:
15450 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15452 case OPC_PRECEU_QH_OBL
:
15454 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15456 case OPC_PRECEU_QH_OBR
:
15458 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15460 case OPC_PRECEU_QH_OBLA
:
15462 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15464 case OPC_PRECEU_QH_OBRA
:
15466 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15468 case OPC_ABSQ_S_OB
:
15470 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15472 case OPC_ABSQ_S_PW
:
15474 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15476 case OPC_ABSQ_S_QH
:
15478 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15482 case OPC_ADDU_OB_DSP
:
15484 case OPC_RADDU_L_OB
:
15486 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15490 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15492 case OPC_SUBQ_S_PW
:
15494 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15498 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15500 case OPC_SUBQ_S_QH
:
15502 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15506 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15508 case OPC_SUBU_S_OB
:
15510 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15514 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15516 case OPC_SUBU_S_QH
:
15518 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15522 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15524 case OPC_SUBUH_R_OB
:
15526 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15530 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15532 case OPC_ADDQ_S_PW
:
15534 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15538 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15540 case OPC_ADDQ_S_QH
:
15542 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15546 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15548 case OPC_ADDU_S_OB
:
15550 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15554 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15556 case OPC_ADDU_S_QH
:
15558 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15562 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15564 case OPC_ADDUH_R_OB
:
15566 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15570 case OPC_CMPU_EQ_OB_DSP
:
15572 case OPC_PRECR_OB_QH
:
15574 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15576 case OPC_PRECR_SRA_QH_PW
:
15579 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15580 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15581 tcg_temp_free_i32(ret_t
);
15584 case OPC_PRECR_SRA_R_QH_PW
:
15587 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15588 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15589 tcg_temp_free_i32(sa_v
);
15592 case OPC_PRECRQ_OB_QH
:
15594 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15596 case OPC_PRECRQ_PW_L
:
15598 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15600 case OPC_PRECRQ_QH_PW
:
15602 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15604 case OPC_PRECRQ_RS_QH_PW
:
15606 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15608 case OPC_PRECRQU_S_OB_QH
:
15610 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15617 tcg_temp_free(v1_t
);
15618 tcg_temp_free(v2_t
);
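/* DSP shift instructions.  Immediate forms pass the shift amount through a
 * constant temporary (t0), the register (V) forms pass the rs value, and
 * unrecognised minor opcodes raise a Reserved Instruction exception. */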
15621 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15622 int ret
, int v1
, int v2
)
15630 /* Treat as NOP. */
15634 t0
= tcg_temp_new();
15635 v1_t
= tcg_temp_new();
15636 v2_t
= tcg_temp_new();
15638 tcg_gen_movi_tl(t0
, v1
);
15639 gen_load_gpr(v1_t
, v1
);
15640 gen_load_gpr(v2_t
, v2
);
15643 case OPC_SHLL_QB_DSP
:
15645 op2
= MASK_SHLL_QB(ctx
->opcode
);
15649 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15653 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15657 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15661 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15663 case OPC_SHLL_S_PH
:
15665 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15667 case OPC_SHLLV_S_PH
:
15669 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15673 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15675 case OPC_SHLLV_S_W
:
15677 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15681 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15685 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15689 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15693 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15697 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15699 case OPC_SHRA_R_QB
:
15701 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15705 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15707 case OPC_SHRAV_R_QB
:
15709 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15713 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15715 case OPC_SHRA_R_PH
:
15717 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15721 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15723 case OPC_SHRAV_R_PH
:
15725 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15729 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15731 case OPC_SHRAV_R_W
:
15733 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15735 default: /* Invalid */
15736 MIPS_INVAL("MASK SHLL.QB");
15737 generate_exception_end(ctx
, EXCP_RI
);
15742 #ifdef TARGET_MIPS64
15743 case OPC_SHLL_OB_DSP
:
15744 op2
= MASK_SHLL_OB(ctx
->opcode
);
15748 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15752 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15754 case OPC_SHLL_S_PW
:
15756 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15758 case OPC_SHLLV_S_PW
:
15760 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15764 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15768 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15772 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15776 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15778 case OPC_SHLL_S_QH
:
15780 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15782 case OPC_SHLLV_S_QH
:
15784 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15788 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15792 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15794 case OPC_SHRA_R_OB
:
15796 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15798 case OPC_SHRAV_R_OB
:
15800 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15804 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15808 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15810 case OPC_SHRA_R_PW
:
15812 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15814 case OPC_SHRAV_R_PW
:
15816 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15820 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15824 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15826 case OPC_SHRA_R_QH
:
15828 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15830 case OPC_SHRAV_R_QH
:
15832 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15836 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15840 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15844 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15848 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15850 default: /* Invalid */
15851 MIPS_INVAL("MASK SHLL.OB");
15852 generate_exception_end(ctx
, EXCP_RI
);
15860 tcg_temp_free(v1_t
);
15861 tcg_temp_free(v2_t
);
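/* DSP multiply and dot-product/accumulate instructions.  Accumulator-based
 * forms receive the target accumulator index in a 32-bit constant
 * temporary; GPR-writing forms are NOPs when the destination is $zero and
 * check_ret is set. */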
15864 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15865 int ret
, int v1
, int v2
, int check_ret
)
15871 if ((ret
== 0) && (check_ret
== 1)) {
15872 /* Treat as NOP. */
15876 t0
= tcg_temp_new_i32();
15877 v1_t
= tcg_temp_new();
15878 v2_t
= tcg_temp_new();
15880 tcg_gen_movi_i32(t0
, ret
);
15881 gen_load_gpr(v1_t
, v1
);
15882 gen_load_gpr(v2_t
, v2
);
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
15887 case OPC_MULT_G_2E
:
15891 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15894 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15897 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15899 case OPC_MULQ_RS_W
:
15900 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15904 case OPC_DPA_W_PH_DSP
:
15906 case OPC_DPAU_H_QBL
:
15908 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15910 case OPC_DPAU_H_QBR
:
15912 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15914 case OPC_DPSU_H_QBL
:
15916 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15918 case OPC_DPSU_H_QBR
:
15920 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15924 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15926 case OPC_DPAX_W_PH
:
15928 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15930 case OPC_DPAQ_S_W_PH
:
15932 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15934 case OPC_DPAQX_S_W_PH
:
15936 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15938 case OPC_DPAQX_SA_W_PH
:
15940 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15944 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15946 case OPC_DPSX_W_PH
:
15948 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15950 case OPC_DPSQ_S_W_PH
:
15952 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15954 case OPC_DPSQX_S_W_PH
:
15956 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15958 case OPC_DPSQX_SA_W_PH
:
15960 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15962 case OPC_MULSAQ_S_W_PH
:
15964 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15966 case OPC_DPAQ_SA_L_W
:
15968 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15970 case OPC_DPSQ_SA_L_W
:
15972 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15974 case OPC_MAQ_S_W_PHL
:
15976 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15978 case OPC_MAQ_S_W_PHR
:
15980 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15982 case OPC_MAQ_SA_W_PHL
:
15984 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15986 case OPC_MAQ_SA_W_PHR
:
15988 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15990 case OPC_MULSA_W_PH
:
15992 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15996 #ifdef TARGET_MIPS64
15997 case OPC_DPAQ_W_QH_DSP
:
15999 int ac
= ret
& 0x03;
16000 tcg_gen_movi_i32(t0
, ac
);
16005 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16009 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16013 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16017 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16021 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16023 case OPC_DPAQ_S_W_QH
:
16025 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16027 case OPC_DPAQ_SA_L_PW
:
16029 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16031 case OPC_DPAU_H_OBL
:
16033 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16035 case OPC_DPAU_H_OBR
:
16037 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16041 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16043 case OPC_DPSQ_S_W_QH
:
16045 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16047 case OPC_DPSQ_SA_L_PW
:
16049 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16051 case OPC_DPSU_H_OBL
:
16053 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16055 case OPC_DPSU_H_OBR
:
16057 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16059 case OPC_MAQ_S_L_PWL
:
16061 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16063 case OPC_MAQ_S_L_PWR
:
16065 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16067 case OPC_MAQ_S_W_QHLL
:
16069 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16071 case OPC_MAQ_SA_W_QHLL
:
16073 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16075 case OPC_MAQ_S_W_QHLR
:
16077 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16079 case OPC_MAQ_SA_W_QHLR
:
16081 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16083 case OPC_MAQ_S_W_QHRL
:
16085 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16087 case OPC_MAQ_SA_W_QHRL
:
16089 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16091 case OPC_MAQ_S_W_QHRR
:
16093 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16095 case OPC_MAQ_SA_W_QHRR
:
16097 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16099 case OPC_MULSAQ_S_L_PW
:
16101 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16103 case OPC_MULSAQ_S_W_QH
:
16105 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16111 case OPC_ADDU_QB_DSP
:
16113 case OPC_MULEU_S_PH_QBL
:
16115 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16117 case OPC_MULEU_S_PH_QBR
:
16119 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16121 case OPC_MULQ_RS_PH
:
16123 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16125 case OPC_MULEQ_S_W_PHL
:
16127 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16129 case OPC_MULEQ_S_W_PHR
:
16131 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16133 case OPC_MULQ_S_PH
:
16135 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16139 #ifdef TARGET_MIPS64
16140 case OPC_ADDU_OB_DSP
:
16142 case OPC_MULEQ_S_PW_QHL
:
16144 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16146 case OPC_MULEQ_S_PW_QHR
:
16148 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16150 case OPC_MULEU_S_QH_OBL
:
16152 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16154 case OPC_MULEU_S_QH_OBR
:
16156 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16158 case OPC_MULQ_RS_QH
:
16160 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16167 tcg_temp_free_i32(t0
);
16168 tcg_temp_free(v1_t
);
16169 tcg_temp_free(v2_t
);
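/* DSP bit/field instructions: bit reversal and the immediate/register
 * replication forms, with 32-bit results sign-extended where required. */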
16172 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16180 /* Treat as NOP. */
16184 t0
= tcg_temp_new();
16185 val_t
= tcg_temp_new();
16186 gen_load_gpr(val_t
, val
);
16189 case OPC_ABSQ_S_PH_DSP
:
16193 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16198 target_long result
;
16199 imm
= (ctx
->opcode
>> 16) & 0xFF;
16200 result
= (uint32_t)imm
<< 24 |
16201 (uint32_t)imm
<< 16 |
16202 (uint32_t)imm
<< 8 |
16204 result
= (int32_t)result
;
16205 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16210 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16211 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16212 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16213 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16214 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16215 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16220 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16221 imm
= (int16_t)(imm
<< 6) >> 6;
16222 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16223 (target_long
)((int32_t)imm
<< 16 | \
16229 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16230 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16231 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16232 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16236 #ifdef TARGET_MIPS64
16237 case OPC_ABSQ_S_QH_DSP
:
16244 imm
= (ctx
->opcode
>> 16) & 0xFF;
16245 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16246 temp
= (temp
<< 16) | temp
;
16247 temp
= (temp
<< 32) | temp
;
16248 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16256 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16257 imm
= (int16_t)(imm
<< 6) >> 6;
16258 temp
= ((target_long
)imm
<< 32) \
16259 | ((target_long
)imm
& 0xFFFFFFFF);
16260 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16268 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16269 imm
= (int16_t)(imm
<< 6) >> 6;
16271 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16272 ((uint64_t)(uint16_t)imm
<< 32) |
16273 ((uint64_t)(uint16_t)imm
<< 16) |
16274 (uint64_t)(uint16_t)imm
;
16275 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16280 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16281 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16282 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16283 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16284 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16285 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16286 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16290 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16291 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16292 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16296 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16297 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16298 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16299 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16300 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16307 tcg_temp_free(val_t
);
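/* DSP compare/pick instructions.  The CMP* forms update condition bits in
 * DSPControl, the CMPG*/PICK*/PACKRL* forms produce a GPR result (the
 * CMPGDU* forms update both). */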
16310 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16311 uint32_t op1
, uint32_t op2
,
16312 int ret
, int v1
, int v2
, int check_ret
)
16318 if ((ret
== 0) && (check_ret
== 1)) {
16319 /* Treat as NOP. */
16323 t1
= tcg_temp_new();
16324 v1_t
= tcg_temp_new();
16325 v2_t
= tcg_temp_new();
16327 gen_load_gpr(v1_t
, v1
);
16328 gen_load_gpr(v2_t
, v2
);
16331 case OPC_CMPU_EQ_QB_DSP
:
16333 case OPC_CMPU_EQ_QB
:
16335 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16337 case OPC_CMPU_LT_QB
:
16339 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16341 case OPC_CMPU_LE_QB
:
16343 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16345 case OPC_CMPGU_EQ_QB
:
16347 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16349 case OPC_CMPGU_LT_QB
:
16351 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16353 case OPC_CMPGU_LE_QB
:
16355 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16357 case OPC_CMPGDU_EQ_QB
:
16359 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16360 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16361 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16362 tcg_gen_shli_tl(t1
, t1
, 24);
16363 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16365 case OPC_CMPGDU_LT_QB
:
16367 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16368 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16369 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16370 tcg_gen_shli_tl(t1
, t1
, 24);
16371 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16373 case OPC_CMPGDU_LE_QB
:
16375 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16376 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16377 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16378 tcg_gen_shli_tl(t1
, t1
, 24);
16379 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16381 case OPC_CMP_EQ_PH
:
16383 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16385 case OPC_CMP_LT_PH
:
16387 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16389 case OPC_CMP_LE_PH
:
16391 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16395 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16399 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16401 case OPC_PACKRL_PH
:
16403 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16407 #ifdef TARGET_MIPS64
16408 case OPC_CMPU_EQ_OB_DSP
:
16410 case OPC_CMP_EQ_PW
:
16412 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16414 case OPC_CMP_LT_PW
:
16416 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16418 case OPC_CMP_LE_PW
:
16420 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16422 case OPC_CMP_EQ_QH
:
16424 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16426 case OPC_CMP_LT_QH
:
16428 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16430 case OPC_CMP_LE_QH
:
16432 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16434 case OPC_CMPGDU_EQ_OB
:
16436 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16438 case OPC_CMPGDU_LT_OB
:
16440 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16442 case OPC_CMPGDU_LE_OB
:
16444 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16446 case OPC_CMPGU_EQ_OB
:
16448 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16450 case OPC_CMPGU_LT_OB
:
16452 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16454 case OPC_CMPGU_LE_OB
:
16456 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16458 case OPC_CMPU_EQ_OB
:
16460 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16462 case OPC_CMPU_LT_OB
:
16464 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16466 case OPC_CMPU_LE_OB
:
16468 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16470 case OPC_PACKRL_PW
:
16472 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16476 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16480 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16484 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16492 tcg_temp_free(v1_t
);
16493 tcg_temp_free(v2_t
);
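/* The APPEND class of DSP instructions (and its 64-bit DAPPEND
 * counterpart): shift GPR[rt] and merge in bits taken from GPR[rs],
 * keeping 32-bit results sign-extended. */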
16496 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16497 uint32_t op1
, int rt
, int rs
, int sa
)
16504 /* Treat as NOP. */
16508 t0
= tcg_temp_new();
16509 gen_load_gpr(t0
, rs
);
16512 case OPC_APPEND_DSP
:
16513 switch (MASK_APPEND(ctx
->opcode
)) {
16516 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16518 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16522 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16523 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16524 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16525 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16527 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16531 if (sa
!= 0 && sa
!= 2) {
16532 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16533 tcg_gen_ext32u_tl(t0
, t0
);
16534 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16535 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16537 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16539 default: /* Invalid */
16540 MIPS_INVAL("MASK APPEND");
16541 generate_exception_end(ctx
, EXCP_RI
);
16545 #ifdef TARGET_MIPS64
16546 case OPC_DAPPEND_DSP
:
16547 switch (MASK_DAPPEND(ctx
->opcode
)) {
16550 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16554 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16555 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16556 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16560 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16561 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16562 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16567 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16568 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16569 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16570 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16573 default: /* Invalid */
16574 MIPS_INVAL("MASK DAPPEND");
16575 generate_exception_end(ctx
, EXCP_RI
);
16584 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16585 int ret
, int v1
, int v2
, int check_ret
)
16594 if ((ret
== 0) && (check_ret
== 1)) {
16595 /* Treat as NOP. */
16599 t0
= tcg_temp_new();
16600 t1
= tcg_temp_new();
16601 v1_t
= tcg_temp_new();
16602 v2_t
= tcg_temp_new();
16604 gen_load_gpr(v1_t
, v1
);
16605 gen_load_gpr(v2_t
, v2
);
16608 case OPC_EXTR_W_DSP
:
16612 tcg_gen_movi_tl(t0
, v2
);
16613 tcg_gen_movi_tl(t1
, v1
);
16614 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16617 tcg_gen_movi_tl(t0
, v2
);
16618 tcg_gen_movi_tl(t1
, v1
);
16619 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16621 case OPC_EXTR_RS_W
:
16622 tcg_gen_movi_tl(t0
, v2
);
16623 tcg_gen_movi_tl(t1
, v1
);
16624 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16627 tcg_gen_movi_tl(t0
, v2
);
16628 tcg_gen_movi_tl(t1
, v1
);
16629 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16631 case OPC_EXTRV_S_H
:
16632 tcg_gen_movi_tl(t0
, v2
);
16633 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16636 tcg_gen_movi_tl(t0
, v2
);
16637 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16639 case OPC_EXTRV_R_W
:
16640 tcg_gen_movi_tl(t0
, v2
);
16641 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16643 case OPC_EXTRV_RS_W
:
16644 tcg_gen_movi_tl(t0
, v2
);
16645 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16648 tcg_gen_movi_tl(t0
, v2
);
16649 tcg_gen_movi_tl(t1
, v1
);
16650 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16653 tcg_gen_movi_tl(t0
, v2
);
16654 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16657 tcg_gen_movi_tl(t0
, v2
);
16658 tcg_gen_movi_tl(t1
, v1
);
16659 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16662 tcg_gen_movi_tl(t0
, v2
);
16663 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16666 imm
= (ctx
->opcode
>> 20) & 0x3F;
16667 tcg_gen_movi_tl(t0
, ret
);
16668 tcg_gen_movi_tl(t1
, imm
);
16669 gen_helper_shilo(t0
, t1
, cpu_env
);
16672 tcg_gen_movi_tl(t0
, ret
);
16673 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16676 tcg_gen_movi_tl(t0
, ret
);
16677 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16680 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16681 tcg_gen_movi_tl(t0
, imm
);
16682 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16685 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16686 tcg_gen_movi_tl(t0
, imm
);
16687 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16691 #ifdef TARGET_MIPS64
16692 case OPC_DEXTR_W_DSP
:
16696 tcg_gen_movi_tl(t0
, ret
);
16697 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16701 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16702 int ac
= (ctx
->opcode
>> 11) & 0x03;
16703 tcg_gen_movi_tl(t0
, shift
);
16704 tcg_gen_movi_tl(t1
, ac
);
16705 gen_helper_dshilo(t0
, t1
, cpu_env
);
16710 int ac
= (ctx
->opcode
>> 11) & 0x03;
16711 tcg_gen_movi_tl(t0
, ac
);
16712 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16716 tcg_gen_movi_tl(t0
, v2
);
16717 tcg_gen_movi_tl(t1
, v1
);
16719 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16722 tcg_gen_movi_tl(t0
, v2
);
16723 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16726 tcg_gen_movi_tl(t0
, v2
);
16727 tcg_gen_movi_tl(t1
, v1
);
16728 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16731 tcg_gen_movi_tl(t0
, v2
);
16732 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16735 tcg_gen_movi_tl(t0
, v2
);
16736 tcg_gen_movi_tl(t1
, v1
);
16737 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16739 case OPC_DEXTR_R_L
:
16740 tcg_gen_movi_tl(t0
, v2
);
16741 tcg_gen_movi_tl(t1
, v1
);
16742 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16744 case OPC_DEXTR_RS_L
:
16745 tcg_gen_movi_tl(t0
, v2
);
16746 tcg_gen_movi_tl(t1
, v1
);
16747 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16750 tcg_gen_movi_tl(t0
, v2
);
16751 tcg_gen_movi_tl(t1
, v1
);
16752 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16754 case OPC_DEXTR_R_W
:
16755 tcg_gen_movi_tl(t0
, v2
);
16756 tcg_gen_movi_tl(t1
, v1
);
16757 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16759 case OPC_DEXTR_RS_W
:
16760 tcg_gen_movi_tl(t0
, v2
);
16761 tcg_gen_movi_tl(t1
, v1
);
16762 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16764 case OPC_DEXTR_S_H
:
16765 tcg_gen_movi_tl(t0
, v2
);
16766 tcg_gen_movi_tl(t1
, v1
);
16767 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16769 case OPC_DEXTRV_S_H
:
16770 tcg_gen_movi_tl(t0
, v2
);
16771 tcg_gen_movi_tl(t1
, v1
);
16772 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16775 tcg_gen_movi_tl(t0
, v2
);
16776 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16778 case OPC_DEXTRV_R_L
:
16779 tcg_gen_movi_tl(t0
, v2
);
16780 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16782 case OPC_DEXTRV_RS_L
:
16783 tcg_gen_movi_tl(t0
, v2
);
16784 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16787 tcg_gen_movi_tl(t0
, v2
);
16788 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16790 case OPC_DEXTRV_R_W
:
16791 tcg_gen_movi_tl(t0
, v2
);
16792 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16794 case OPC_DEXTRV_RS_W
:
16795 tcg_gen_movi_tl(t0
, v2
);
16796 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16805 tcg_temp_free(v1_t
);
16806 tcg_temp_free(v2_t
);
16809 /* End MIPSDSP functions. */
16811 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16813 int rs
, rt
, rd
, sa
;
16816 rs
= (ctx
->opcode
>> 21) & 0x1f;
16817 rt
= (ctx
->opcode
>> 16) & 0x1f;
16818 rd
= (ctx
->opcode
>> 11) & 0x1f;
16819 sa
= (ctx
->opcode
>> 6) & 0x1f;
16821 op1
= MASK_SPECIAL(ctx
->opcode
);
16824 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16826 case OPC_MULT
... OPC_DIVU
:
16827 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16837 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16840 MIPS_INVAL("special_r6 muldiv");
16841 generate_exception_end(ctx
, EXCP_RI
);
16847 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16851 if (rt
== 0 && sa
== 1) {
            /* The major opcode and function field are shared with the
               pre-R6 MFHI/MTHI, so the other fields must be checked as
               well. */
16854 gen_cl(ctx
, op1
, rd
, rs
);
16856 generate_exception_end(ctx
, EXCP_RI
);
16860 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16861 gen_helper_do_semihosting(cpu_env
);
16863 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16864 generate_exception_end(ctx
, EXCP_RI
);
16866 generate_exception_end(ctx
, EXCP_DBp
);
16870 #if defined(TARGET_MIPS64)
16872 check_mips_64(ctx
);
16873 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16877 if (rt
== 0 && sa
== 1) {
            /* The major opcode and function field are shared with the
               pre-R6 MFHI/MTHI, so the other fields must be checked as
               well. */
16880 check_mips_64(ctx
);
16881 gen_cl(ctx
, op1
, rd
, rs
);
16883 generate_exception_end(ctx
, EXCP_RI
);
16886 case OPC_DMULT
... OPC_DDIVU
:
16887 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16897 check_mips_64(ctx
);
16898 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16901 MIPS_INVAL("special_r6 muldiv");
16902 generate_exception_end(ctx
, EXCP_RI
);
16907 default: /* Invalid */
16908 MIPS_INVAL("special_r6");
16909 generate_exception_end(ctx
, EXCP_RI
);
16914 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16916 int rs
, rt
, rd
, sa
;
16919 rs
= (ctx
->opcode
>> 21) & 0x1f;
16920 rt
= (ctx
->opcode
>> 16) & 0x1f;
16921 rd
= (ctx
->opcode
>> 11) & 0x1f;
16922 sa
= (ctx
->opcode
>> 6) & 0x1f;
16924 op1
= MASK_SPECIAL(ctx
->opcode
);
16926 case OPC_MOVN
: /* Conditional move */
16928 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16929 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16930 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16932 case OPC_MFHI
: /* Move from HI/LO */
16934 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16937 case OPC_MTLO
: /* Move to HI/LO */
16938 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16941 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16942 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16943 check_cp1_enabled(ctx
);
16944 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16945 (ctx
->opcode
>> 16) & 1);
16947 generate_exception_err(ctx
, EXCP_CpU
, 1);
16953 check_insn(ctx
, INSN_VR54XX
);
16954 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16955 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16957 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16962 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16964 #if defined(TARGET_MIPS64)
16965 case OPC_DMULT
... OPC_DDIVU
:
16966 check_insn(ctx
, ISA_MIPS3
);
16967 check_mips_64(ctx
);
16968 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16972 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16975 #ifdef MIPS_STRICT_STANDARD
16976 MIPS_INVAL("SPIM");
16977 generate_exception_end(ctx
, EXCP_RI
);
16979 /* Implemented as RI exception for now. */
16980 MIPS_INVAL("spim (unofficial)");
16981 generate_exception_end(ctx
, EXCP_RI
);
16984 default: /* Invalid */
16985 MIPS_INVAL("special_legacy");
16986 generate_exception_end(ctx
, EXCP_RI
);
16991 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16993 int rs
, rt
, rd
, sa
;
16996 rs
= (ctx
->opcode
>> 21) & 0x1f;
16997 rt
= (ctx
->opcode
>> 16) & 0x1f;
16998 rd
= (ctx
->opcode
>> 11) & 0x1f;
16999 sa
= (ctx
->opcode
>> 6) & 0x1f;
17001 op1
= MASK_SPECIAL(ctx
->opcode
);
17003 case OPC_SLL
: /* Shift with immediate */
17004 if (sa
== 5 && rd
== 0 &&
17005 rs
== 0 && rt
== 0) { /* PAUSE */
17006 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17007 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17008 generate_exception_end(ctx
, EXCP_RI
);
17014 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17017 switch ((ctx
->opcode
>> 21) & 0x1f) {
17019 /* rotr is decoded as srl on non-R2 CPUs */
17020 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17025 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17028 generate_exception_end(ctx
, EXCP_RI
);
17032 case OPC_ADD
... OPC_SUBU
:
17033 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17035 case OPC_SLLV
: /* Shifts */
17037 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17040 switch ((ctx
->opcode
>> 6) & 0x1f) {
17042 /* rotrv is decoded as srlv on non-R2 CPUs */
17043 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17048 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17051 generate_exception_end(ctx
, EXCP_RI
);
17055 case OPC_SLT
: /* Set on less than */
17057 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17059 case OPC_AND
: /* Logic*/
17063 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17066 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17068 case OPC_TGE
... OPC_TEQ
: /* Traps */
17070 check_insn(ctx
, ISA_MIPS2
);
17071 gen_trap(ctx
, op1
, rs
, rt
, -1);
17073 case OPC_LSA
: /* OPC_PMON */
17074 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17075 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17076 decode_opc_special_r6(env
, ctx
);
17078 /* Pmon entry point, also R4010 selsl */
17079 #ifdef MIPS_STRICT_STANDARD
17080 MIPS_INVAL("PMON / selsl");
17081 generate_exception_end(ctx
, EXCP_RI
);
17083 gen_helper_0e0i(pmon
, sa
);
17088 generate_exception_end(ctx
, EXCP_SYSCALL
);
17091 generate_exception_end(ctx
, EXCP_BREAK
);
17094 check_insn(ctx
, ISA_MIPS2
);
17095 /* Treat as NOP. */
17098 #if defined(TARGET_MIPS64)
17099 /* MIPS64 specific opcodes */
17104 check_insn(ctx
, ISA_MIPS3
);
17105 check_mips_64(ctx
);
17106 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17109 switch ((ctx
->opcode
>> 21) & 0x1f) {
17111 /* drotr is decoded as dsrl on non-R2 CPUs */
17112 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17117 check_insn(ctx
, ISA_MIPS3
);
17118 check_mips_64(ctx
);
17119 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17122 generate_exception_end(ctx
, EXCP_RI
);
17127 switch ((ctx
->opcode
>> 21) & 0x1f) {
17129 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17130 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17135 check_insn(ctx
, ISA_MIPS3
);
17136 check_mips_64(ctx
);
17137 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17140 generate_exception_end(ctx
, EXCP_RI
);
17144 case OPC_DADD
... OPC_DSUBU
:
17145 check_insn(ctx
, ISA_MIPS3
);
17146 check_mips_64(ctx
);
17147 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17151 check_insn(ctx
, ISA_MIPS3
);
17152 check_mips_64(ctx
);
17153 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17156 switch ((ctx
->opcode
>> 6) & 0x1f) {
17158 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17159 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17164 check_insn(ctx
, ISA_MIPS3
);
17165 check_mips_64(ctx
);
17166 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17169 generate_exception_end(ctx
, EXCP_RI
);
17174 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17175 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17176 decode_opc_special_r6(env
, ctx
);
17181 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17182 decode_opc_special_r6(env
, ctx
);
17184 decode_opc_special_legacy(env
, ctx
);
17189 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17196 rs
= (ctx
->opcode
>> 21) & 0x1f;
17197 rt
= (ctx
->opcode
>> 16) & 0x1f;
17198 rd
= (ctx
->opcode
>> 11) & 0x1f;
17200 op1
= MASK_SPECIAL2(ctx
->opcode
);
17202 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17203 case OPC_MSUB
... OPC_MSUBU
:
17204 check_insn(ctx
, ISA_MIPS32
);
17205 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17208 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17211 case OPC_DIVU_G_2F
:
17212 case OPC_MULT_G_2F
:
17213 case OPC_MULTU_G_2F
:
17215 case OPC_MODU_G_2F
:
17216 check_insn(ctx
, INSN_LOONGSON2F
);
17217 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17221 check_insn(ctx
, ISA_MIPS32
);
17222 gen_cl(ctx
, op1
, rd
, rs
);
17225 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17226 gen_helper_do_semihosting(cpu_env
);
        /* XXX: not clear which exception should be raised
         * when in debug mode...
         */
17231 check_insn(ctx
, ISA_MIPS32
);
17232 generate_exception_end(ctx
, EXCP_DBp
);
17235 #if defined(TARGET_MIPS64)
17238 check_insn(ctx
, ISA_MIPS64
);
17239 check_mips_64(ctx
);
17240 gen_cl(ctx
, op1
, rd
, rs
);
17242 case OPC_DMULT_G_2F
:
17243 case OPC_DMULTU_G_2F
:
17244 case OPC_DDIV_G_2F
:
17245 case OPC_DDIVU_G_2F
:
17246 case OPC_DMOD_G_2F
:
17247 case OPC_DMODU_G_2F
:
17248 check_insn(ctx
, INSN_LOONGSON2F
);
17249 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17252 default: /* Invalid */
17253 MIPS_INVAL("special2_legacy");
17254 generate_exception_end(ctx
, EXCP_RI
);
17259 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17261 int rs
, rt
, rd
, sa
;
17265 rs
= (ctx
->opcode
>> 21) & 0x1f;
17266 rt
= (ctx
->opcode
>> 16) & 0x1f;
17267 rd
= (ctx
->opcode
>> 11) & 0x1f;
17268 sa
= (ctx
->opcode
>> 6) & 0x1f;
17269 imm
= (int16_t)ctx
->opcode
>> 7;
17271 op1
= MASK_SPECIAL3(ctx
->opcode
);
17275 /* hint codes 24-31 are reserved and signal RI */
17276 generate_exception_end(ctx
, EXCP_RI
);
17278 /* Treat as NOP. */
17281 check_cp0_enabled(ctx
);
17282 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17283 gen_cache_operation(ctx
, rt
, rs
, imm
);
17287 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17290 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17295 /* Treat as NOP. */
17298 op2
= MASK_BSHFL(ctx
->opcode
);
17300 case OPC_ALIGN
... OPC_ALIGN_END
:
17301 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17304 gen_bitswap(ctx
, op2
, rd
, rt
);
17309 #if defined(TARGET_MIPS64)
17311 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17314 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17317 check_mips_64(ctx
);
17320 /* Treat as NOP. */
17323 op2
= MASK_DBSHFL(ctx
->opcode
);
17325 case OPC_DALIGN
... OPC_DALIGN_END
:
17326 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17329 gen_bitswap(ctx
, op2
, rd
, rt
);
17336 default: /* Invalid */
17337 MIPS_INVAL("special3_r6");
17338 generate_exception_end(ctx
, EXCP_RI
);
17343 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17348 rs
= (ctx
->opcode
>> 21) & 0x1f;
17349 rt
= (ctx
->opcode
>> 16) & 0x1f;
17350 rd
= (ctx
->opcode
>> 11) & 0x1f;
17352 op1
= MASK_SPECIAL3(ctx
->opcode
);
17354 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17355 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17356 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
17359 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17360 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17363 case OPC_ADDUH_R_QB
:
17365 case OPC_ADDQH_R_PH
:
17367 case OPC_ADDQH_R_W
:
17369 case OPC_SUBUH_R_QB
:
17371 case OPC_SUBQH_R_PH
:
17373 case OPC_SUBQH_R_W
:
17374 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17379 case OPC_MULQ_RS_W
:
17380 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17383 MIPS_INVAL("MASK ADDUH.QB");
17384 generate_exception_end(ctx
, EXCP_RI
);
17387 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17388 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17390 generate_exception_end(ctx
, EXCP_RI
);
17394 op2
= MASK_LX(ctx
->opcode
);
17396 #if defined(TARGET_MIPS64)
17402 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17404 default: /* Invalid */
17405 MIPS_INVAL("MASK LX");
17406 generate_exception_end(ctx
, EXCP_RI
);
17410 case OPC_ABSQ_S_PH_DSP
:
17411 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17413 case OPC_ABSQ_S_QB
:
17414 case OPC_ABSQ_S_PH
:
17416 case OPC_PRECEQ_W_PHL
:
17417 case OPC_PRECEQ_W_PHR
:
17418 case OPC_PRECEQU_PH_QBL
:
17419 case OPC_PRECEQU_PH_QBR
:
17420 case OPC_PRECEQU_PH_QBLA
:
17421 case OPC_PRECEQU_PH_QBRA
:
17422 case OPC_PRECEU_PH_QBL
:
17423 case OPC_PRECEU_PH_QBR
:
17424 case OPC_PRECEU_PH_QBLA
:
17425 case OPC_PRECEU_PH_QBRA
:
17426 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17433 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17436 MIPS_INVAL("MASK ABSQ_S.PH");
17437 generate_exception_end(ctx
, EXCP_RI
);
17441 case OPC_ADDU_QB_DSP
:
17442 op2
= MASK_ADDU_QB(ctx
->opcode
);
17445 case OPC_ADDQ_S_PH
:
17448 case OPC_ADDU_S_QB
:
17450 case OPC_ADDU_S_PH
:
17452 case OPC_SUBQ_S_PH
:
17455 case OPC_SUBU_S_QB
:
17457 case OPC_SUBU_S_PH
:
17461 case OPC_RADDU_W_QB
:
17462 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17464 case OPC_MULEU_S_PH_QBL
:
17465 case OPC_MULEU_S_PH_QBR
:
17466 case OPC_MULQ_RS_PH
:
17467 case OPC_MULEQ_S_W_PHL
:
17468 case OPC_MULEQ_S_W_PHR
:
17469 case OPC_MULQ_S_PH
:
17470 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17472 default: /* Invalid */
17473 MIPS_INVAL("MASK ADDU.QB");
17474 generate_exception_end(ctx
, EXCP_RI
);
17479 case OPC_CMPU_EQ_QB_DSP
:
17480 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17482 case OPC_PRECR_SRA_PH_W
:
17483 case OPC_PRECR_SRA_R_PH_W
:
17484 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17486 case OPC_PRECR_QB_PH
:
17487 case OPC_PRECRQ_QB_PH
:
17488 case OPC_PRECRQ_PH_W
:
17489 case OPC_PRECRQ_RS_PH_W
:
17490 case OPC_PRECRQU_S_QB_PH
:
17491 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17493 case OPC_CMPU_EQ_QB
:
17494 case OPC_CMPU_LT_QB
:
17495 case OPC_CMPU_LE_QB
:
17496 case OPC_CMP_EQ_PH
:
17497 case OPC_CMP_LT_PH
:
17498 case OPC_CMP_LE_PH
:
17499 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17501 case OPC_CMPGU_EQ_QB
:
17502 case OPC_CMPGU_LT_QB
:
17503 case OPC_CMPGU_LE_QB
:
17504 case OPC_CMPGDU_EQ_QB
:
17505 case OPC_CMPGDU_LT_QB
:
17506 case OPC_CMPGDU_LE_QB
:
17509 case OPC_PACKRL_PH
:
17510 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17512 default: /* Invalid */
17513 MIPS_INVAL("MASK CMPU.EQ.QB");
17514 generate_exception_end(ctx
, EXCP_RI
);
17518 case OPC_SHLL_QB_DSP
:
17519 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17521 case OPC_DPA_W_PH_DSP
:
17522 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17524 case OPC_DPAU_H_QBL
:
17525 case OPC_DPAU_H_QBR
:
17526 case OPC_DPSU_H_QBL
:
17527 case OPC_DPSU_H_QBR
:
17529 case OPC_DPAX_W_PH
:
17530 case OPC_DPAQ_S_W_PH
:
17531 case OPC_DPAQX_S_W_PH
:
17532 case OPC_DPAQX_SA_W_PH
:
17534 case OPC_DPSX_W_PH
:
17535 case OPC_DPSQ_S_W_PH
:
17536 case OPC_DPSQX_S_W_PH
:
17537 case OPC_DPSQX_SA_W_PH
:
17538 case OPC_MULSAQ_S_W_PH
:
17539 case OPC_DPAQ_SA_L_W
:
17540 case OPC_DPSQ_SA_L_W
:
17541 case OPC_MAQ_S_W_PHL
:
17542 case OPC_MAQ_S_W_PHR
:
17543 case OPC_MAQ_SA_W_PHL
:
17544 case OPC_MAQ_SA_W_PHR
:
17545 case OPC_MULSA_W_PH
:
17546 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17548 default: /* Invalid */
17549 MIPS_INVAL("MASK DPAW.PH");
17550 generate_exception_end(ctx
, EXCP_RI
);
17555 op2
= MASK_INSV(ctx
->opcode
);
17566 t0
= tcg_temp_new();
17567 t1
= tcg_temp_new();
17569 gen_load_gpr(t0
, rt
);
17570 gen_load_gpr(t1
, rs
);
17572 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17578 default: /* Invalid */
17579 MIPS_INVAL("MASK INSV");
17580 generate_exception_end(ctx
, EXCP_RI
);
17584 case OPC_APPEND_DSP
:
17585 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17587 case OPC_EXTR_W_DSP
:
17588 op2
= MASK_EXTR_W(ctx
->opcode
);
17592 case OPC_EXTR_RS_W
:
17594 case OPC_EXTRV_S_H
:
17596 case OPC_EXTRV_R_W
:
17597 case OPC_EXTRV_RS_W
:
17602 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17605 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17611 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17613 default: /* Invalid */
17614 MIPS_INVAL("MASK EXTR.W");
17615 generate_exception_end(ctx
, EXCP_RI
);
17619 #if defined(TARGET_MIPS64)
17620 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17621 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17622 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17623 check_insn(ctx
, INSN_LOONGSON2E
);
17624 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17626 case OPC_ABSQ_S_QH_DSP
:
17627 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17629 case OPC_PRECEQ_L_PWL
:
17630 case OPC_PRECEQ_L_PWR
:
17631 case OPC_PRECEQ_PW_QHL
:
17632 case OPC_PRECEQ_PW_QHR
:
17633 case OPC_PRECEQ_PW_QHLA
:
17634 case OPC_PRECEQ_PW_QHRA
:
17635 case OPC_PRECEQU_QH_OBL
:
17636 case OPC_PRECEQU_QH_OBR
:
17637 case OPC_PRECEQU_QH_OBLA
:
17638 case OPC_PRECEQU_QH_OBRA
:
17639 case OPC_PRECEU_QH_OBL
:
17640 case OPC_PRECEU_QH_OBR
:
17641 case OPC_PRECEU_QH_OBLA
:
17642 case OPC_PRECEU_QH_OBRA
:
17643 case OPC_ABSQ_S_OB
:
17644 case OPC_ABSQ_S_PW
:
17645 case OPC_ABSQ_S_QH
:
17646 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17654 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17656 default: /* Invalid */
17657 MIPS_INVAL("MASK ABSQ_S.QH");
17658 generate_exception_end(ctx
, EXCP_RI
);
17662 case OPC_ADDU_OB_DSP
:
17663 op2
= MASK_ADDU_OB(ctx
->opcode
);
17665 case OPC_RADDU_L_OB
:
17667 case OPC_SUBQ_S_PW
:
17669 case OPC_SUBQ_S_QH
:
17671 case OPC_SUBU_S_OB
:
17673 case OPC_SUBU_S_QH
:
17675 case OPC_SUBUH_R_OB
:
17677 case OPC_ADDQ_S_PW
:
17679 case OPC_ADDQ_S_QH
:
17681 case OPC_ADDU_S_OB
:
17683 case OPC_ADDU_S_QH
:
17685 case OPC_ADDUH_R_OB
:
17686 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17688 case OPC_MULEQ_S_PW_QHL
:
17689 case OPC_MULEQ_S_PW_QHR
:
17690 case OPC_MULEU_S_QH_OBL
:
17691 case OPC_MULEU_S_QH_OBR
:
17692 case OPC_MULQ_RS_QH
:
17693 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17695 default: /* Invalid */
17696 MIPS_INVAL("MASK ADDU.OB");
17697 generate_exception_end(ctx
, EXCP_RI
);
17701 case OPC_CMPU_EQ_OB_DSP
:
17702 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17704 case OPC_PRECR_SRA_QH_PW
:
17705 case OPC_PRECR_SRA_R_QH_PW
:
17706 /* Return value is rt. */
17707 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17709 case OPC_PRECR_OB_QH
:
17710 case OPC_PRECRQ_OB_QH
:
17711 case OPC_PRECRQ_PW_L
:
17712 case OPC_PRECRQ_QH_PW
:
17713 case OPC_PRECRQ_RS_QH_PW
:
17714 case OPC_PRECRQU_S_OB_QH
:
17715 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17717 case OPC_CMPU_EQ_OB
:
17718 case OPC_CMPU_LT_OB
:
17719 case OPC_CMPU_LE_OB
:
17720 case OPC_CMP_EQ_QH
:
17721 case OPC_CMP_LT_QH
:
17722 case OPC_CMP_LE_QH
:
17723 case OPC_CMP_EQ_PW
:
17724 case OPC_CMP_LT_PW
:
17725 case OPC_CMP_LE_PW
:
17726 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17728 case OPC_CMPGDU_EQ_OB
:
17729 case OPC_CMPGDU_LT_OB
:
17730 case OPC_CMPGDU_LE_OB
:
17731 case OPC_CMPGU_EQ_OB
:
17732 case OPC_CMPGU_LT_OB
:
17733 case OPC_CMPGU_LE_OB
:
17734 case OPC_PACKRL_PW
:
17738 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17740 default: /* Invalid */
17741 MIPS_INVAL("MASK CMPU_EQ.OB");
17742 generate_exception_end(ctx
, EXCP_RI
);
17746 case OPC_DAPPEND_DSP
:
17747 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17749 case OPC_DEXTR_W_DSP
:
17750 op2
= MASK_DEXTR_W(ctx
->opcode
);
17757 case OPC_DEXTR_R_L
:
17758 case OPC_DEXTR_RS_L
:
17760 case OPC_DEXTR_R_W
:
17761 case OPC_DEXTR_RS_W
:
17762 case OPC_DEXTR_S_H
:
17764 case OPC_DEXTRV_R_L
:
17765 case OPC_DEXTRV_RS_L
:
17766 case OPC_DEXTRV_S_H
:
17768 case OPC_DEXTRV_R_W
:
17769 case OPC_DEXTRV_RS_W
:
17770 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17775 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17777 default: /* Invalid */
17778 MIPS_INVAL("MASK EXTR.W");
17779 generate_exception_end(ctx
, EXCP_RI
);
17783 case OPC_DPAQ_W_QH_DSP
:
17784 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17786 case OPC_DPAU_H_OBL
:
17787 case OPC_DPAU_H_OBR
:
17788 case OPC_DPSU_H_OBL
:
17789 case OPC_DPSU_H_OBR
:
17791 case OPC_DPAQ_S_W_QH
:
17793 case OPC_DPSQ_S_W_QH
:
17794 case OPC_MULSAQ_S_W_QH
:
17795 case OPC_DPAQ_SA_L_PW
:
17796 case OPC_DPSQ_SA_L_PW
:
17797 case OPC_MULSAQ_S_L_PW
:
17798 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17800 case OPC_MAQ_S_W_QHLL
:
17801 case OPC_MAQ_S_W_QHLR
:
17802 case OPC_MAQ_S_W_QHRL
:
17803 case OPC_MAQ_S_W_QHRR
:
17804 case OPC_MAQ_SA_W_QHLL
:
17805 case OPC_MAQ_SA_W_QHLR
:
17806 case OPC_MAQ_SA_W_QHRL
:
17807 case OPC_MAQ_SA_W_QHRR
:
17808 case OPC_MAQ_S_L_PWL
:
17809 case OPC_MAQ_S_L_PWR
:
17814 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17816 default: /* Invalid */
17817 MIPS_INVAL("MASK DPAQ.W.QH");
17818 generate_exception_end(ctx
, EXCP_RI
);
17822 case OPC_DINSV_DSP
:
17823 op2
= MASK_INSV(ctx
->opcode
);
17834 t0
= tcg_temp_new();
17835 t1
= tcg_temp_new();
17837 gen_load_gpr(t0
, rt
);
17838 gen_load_gpr(t1
, rs
);
17840 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17846 default: /* Invalid */
17847 MIPS_INVAL("MASK DINSV");
17848 generate_exception_end(ctx
, EXCP_RI
);
17852 case OPC_SHLL_OB_DSP
:
17853 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17856 default: /* Invalid */
17857 MIPS_INVAL("special3_legacy");
17858 generate_exception_end(ctx
, EXCP_RI
);
17863 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17865 int rs
, rt
, rd
, sa
;
17868 rs
= (ctx
->opcode
>> 21) & 0x1f;
17869 rt
= (ctx
->opcode
>> 16) & 0x1f;
17870 rd
= (ctx
->opcode
>> 11) & 0x1f;
17871 sa
= (ctx
->opcode
>> 6) & 0x1f;
17873 op1
= MASK_SPECIAL3(ctx
->opcode
);
17877 check_insn(ctx
, ISA_MIPS32R2
);
17878 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17881 op2
= MASK_BSHFL(ctx
->opcode
);
17883 case OPC_ALIGN
... OPC_ALIGN_END
:
17885 check_insn(ctx
, ISA_MIPS32R6
);
17886 decode_opc_special3_r6(env
, ctx
);
17889 check_insn(ctx
, ISA_MIPS32R2
);
17890 gen_bshfl(ctx
, op2
, rt
, rd
);
17894 #if defined(TARGET_MIPS64)
17895 case OPC_DEXTM
... OPC_DEXT
:
17896 case OPC_DINSM
... OPC_DINS
:
17897 check_insn(ctx
, ISA_MIPS64R2
);
17898 check_mips_64(ctx
);
17899 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17902 op2
= MASK_DBSHFL(ctx
->opcode
);
17904 case OPC_DALIGN
... OPC_DALIGN_END
:
17906 check_insn(ctx
, ISA_MIPS32R6
);
17907 decode_opc_special3_r6(env
, ctx
);
17910 check_insn(ctx
, ISA_MIPS64R2
);
17911 check_mips_64(ctx
);
17912 op2
= MASK_DBSHFL(ctx
->opcode
);
17913 gen_bshfl(ctx
, op2
, rt
, rd
);
17919 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17922 check_insn(ctx
, ASE_MT
);
17924 TCGv t0
= tcg_temp_new();
17925 TCGv t1
= tcg_temp_new();
17927 gen_load_gpr(t0
, rt
);
17928 gen_load_gpr(t1
, rs
);
17929 gen_helper_fork(t0
, t1
);
17935 check_insn(ctx
, ASE_MT
);
17937 TCGv t0
= tcg_temp_new();
17939 gen_load_gpr(t0
, rs
);
17940 gen_helper_yield(t0
, cpu_env
, t0
);
17941 gen_store_gpr(t0
, rd
);
17946 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17947 decode_opc_special3_r6(env
, ctx
);
17949 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            generate_exception_end(ctx, EXCP_RI);
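/*
 * The check below uses the usual "contains a zero lane" bit trick on each
 * 64-bit half of the vector register: (x - ones) & ~x & highbits is
 * non-zero iff some lane of x is zero, where "ones" has the lowest bit and
 * "highbits" the highest bit of every lane set.  For a byte lane of 0x00,
 * (0x00 - 0x01) & ~0x00 & 0x80 = 0x80, while for a non-zero lane such as
 * 0x05 the result is 0.
 */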
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
    tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
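/*
 * gen_msa_branch() handles BZ.V/BNZ.V and BZ.df/BNZ.df.  The condition is
 * evaluated into bcond (a whole-register zero test for the .V forms, a
 * per-element zero test via gen_check_zero_element() otherwise), the
 * branch target is pc + (s16 << 2) + 4, and MIPS_HFLAG_BC/MIPS_HFLAG_BDS32
 * mark the conditional branch with a 32-bit delay slot.
 */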
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;
    check_msa_access(ctx);
    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt << 1], msa_wr_d[(wt << 1) + 1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        gen_check_zero_element(bcond, df, wt);
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
    ctx->btarget = ctx->pc + (s16 << 2) + 4;
    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
18061 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18063 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18064 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18065 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18066 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18068 TCGv_i32 twd
= tcg_const_i32(wd
);
18069 TCGv_i32 tws
= tcg_const_i32(ws
);
18070 TCGv_i32 ti8
= tcg_const_i32(i8
);
18072 switch (MASK_MSA_I8(ctx
->opcode
)) {
18074 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18077 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18080 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18083 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18086 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18089 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18092 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18098 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18099 if (df
== DF_DOUBLE
) {
18100 generate_exception_end(ctx
, EXCP_RI
);
18102 TCGv_i32 tdf
= tcg_const_i32(df
);
18103 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18104 tcg_temp_free_i32(tdf
);
18109 MIPS_INVAL("MSA instruction");
18110 generate_exception_end(ctx
, EXCP_RI
);
18114 tcg_temp_free_i32(twd
);
18115 tcg_temp_free_i32(tws
);
18116 tcg_temp_free_i32(ti8
);
18119 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18121 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18122 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18123 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18124 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18125 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18126 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18128 TCGv_i32 tdf
= tcg_const_i32(df
);
18129 TCGv_i32 twd
= tcg_const_i32(wd
);
18130 TCGv_i32 tws
= tcg_const_i32(ws
);
18131 TCGv_i32 timm
= tcg_temp_new_i32();
18132 tcg_gen_movi_i32(timm
, u5
);
18134 switch (MASK_MSA_I5(ctx
->opcode
)) {
18136 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18139 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18141 case OPC_MAXI_S_df
:
18142 tcg_gen_movi_i32(timm
, s5
);
18143 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18145 case OPC_MAXI_U_df
:
18146 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18148 case OPC_MINI_S_df
:
18149 tcg_gen_movi_i32(timm
, s5
);
18150 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18152 case OPC_MINI_U_df
:
18153 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18156 tcg_gen_movi_i32(timm
, s5
);
18157 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18159 case OPC_CLTI_S_df
:
18160 tcg_gen_movi_i32(timm
, s5
);
18161 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18163 case OPC_CLTI_U_df
:
18164 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18166 case OPC_CLEI_S_df
:
18167 tcg_gen_movi_i32(timm
, s5
);
18168 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18170 case OPC_CLEI_U_df
:
18171 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18175 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18176 tcg_gen_movi_i32(timm
, s10
);
18177 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18181 MIPS_INVAL("MSA instruction");
18182 generate_exception_end(ctx
, EXCP_RI
);
18186 tcg_temp_free_i32(tdf
);
18187 tcg_temp_free_i32(twd
);
18188 tcg_temp_free_i32(tws
);
18189 tcg_temp_free_i32(timm
);
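/*
 * In the BIT format the 7-bit dfm field encodes both the data format and
 * the bit index m by the position of the first 0 bit from the MSB:
 * 0mmmmmm selects doubleword, 10mmmmm word, 110mmmm halfword and 1110mmm
 * byte; any other pattern is reserved and raises a Reserved Instruction
 * exception.
 */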
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    if ((dfm & 0x40) == 0x00) {
    } else if ((dfm & 0x60) == 0x40) {
    } else if ((dfm & 0x70) == 0x60) {
    } else if ((dfm & 0x78) == 0x70) {
        generate_exception_end(ctx, EXCP_RI);
    tdf = tcg_const_i32(df);
    tm  = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);
    switch (MASK_MSA_BIT(ctx->opcode)) {
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
18276 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18278 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18279 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18280 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18281 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18282 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18284 TCGv_i32 tdf
= tcg_const_i32(df
);
18285 TCGv_i32 twd
= tcg_const_i32(wd
);
18286 TCGv_i32 tws
= tcg_const_i32(ws
);
18287 TCGv_i32 twt
= tcg_const_i32(wt
);
18289 switch (MASK_MSA_3R(ctx
->opcode
)) {
18291 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18294 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18297 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18300 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18302 case OPC_SUBS_S_df
:
18303 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18306 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18309 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18312 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18315 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18318 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18320 case OPC_ADDS_A_df
:
18321 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18323 case OPC_SUBS_U_df
:
18324 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18327 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18330 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18333 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18336 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18339 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18342 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18344 case OPC_ADDS_S_df
:
18345 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18347 case OPC_SUBSUS_U_df
:
18348 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18351 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18354 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18357 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18360 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18363 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18366 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18368 case OPC_ADDS_U_df
:
18369 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18371 case OPC_SUBSUU_S_df
:
18372 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18375 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18378 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18381 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18384 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18387 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18389 case OPC_ASUB_S_df
:
18390 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18393 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18396 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18399 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18402 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18405 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18408 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18410 case OPC_ASUB_U_df
:
18411 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18414 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18417 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18420 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18423 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18425 case OPC_AVER_S_df
:
18426 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18429 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18432 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18435 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18438 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18440 case OPC_AVER_U_df
:
18441 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18444 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18447 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18450 case OPC_DOTP_S_df
:
18451 case OPC_DOTP_U_df
:
18452 case OPC_DPADD_S_df
:
18453 case OPC_DPADD_U_df
:
18454 case OPC_DPSUB_S_df
:
18455 case OPC_HADD_S_df
:
18456 case OPC_DPSUB_U_df
:
18457 case OPC_HADD_U_df
:
18458 case OPC_HSUB_S_df
:
18459 case OPC_HSUB_U_df
:
18460 if (df
== DF_BYTE
) {
18461 generate_exception_end(ctx
, EXCP_RI
);
18464 switch (MASK_MSA_3R(ctx
->opcode
)) {
18465 case OPC_DOTP_S_df
:
18466 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18468 case OPC_DOTP_U_df
:
18469 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18471 case OPC_DPADD_S_df
:
18472 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18474 case OPC_DPADD_U_df
:
18475 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18477 case OPC_DPSUB_S_df
:
18478 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18480 case OPC_HADD_S_df
:
18481 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18483 case OPC_DPSUB_U_df
:
18484 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18486 case OPC_HADD_U_df
:
18487 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18489 case OPC_HSUB_S_df
:
18490 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18492 case OPC_HSUB_U_df
:
18493 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 MIPS_INVAL("MSA instruction");
18499 generate_exception_end(ctx
, EXCP_RI
);
18502 tcg_temp_free_i32(twd
);
18503 tcg_temp_free_i32(tws
);
18504 tcg_temp_free_i32(twt
);
18505 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);
    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
18540 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18543 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18544 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18545 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18547 TCGv_i32 tws
= tcg_const_i32(ws
);
18548 TCGv_i32 twd
= tcg_const_i32(wd
);
18549 TCGv_i32 tn
= tcg_const_i32(n
);
18550 TCGv_i32 tdf
= tcg_const_i32(df
);
18552 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18554 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18556 case OPC_SPLATI_df
:
18557 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18560 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18562 case OPC_COPY_S_df
:
18563 case OPC_COPY_U_df
:
18564 case OPC_INSERT_df
:
18565 #if !defined(TARGET_MIPS64)
18566 /* Double format valid only for MIPS64 */
18567 if (df
== DF_DOUBLE
) {
18568 generate_exception_end(ctx
, EXCP_RI
);
18572 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18573 case OPC_COPY_S_df
:
18574 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18576 case OPC_COPY_U_df
:
18577 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18579 case OPC_INSERT_df
:
18580 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18585 MIPS_INVAL("MSA instruction");
18586 generate_exception_end(ctx
, EXCP_RI
);
18588 tcg_temp_free_i32(twd
);
18589 tcg_temp_free_i32(tws
);
18590 tcg_temp_free_i32(tn
);
18591 tcg_temp_free_i32(tdf
);
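/*
 * The ELM format encodes the data format and element index n in the 6-bit
 * dfn field using the same prefix scheme as the BIT format: 00nnnn byte,
 * 100nnn halfword, 1100nn word, 11100n doubleword; the value 111110
 * selects the CTCMSA/CFCMSA/MOVE.V group handled by gen_msa_elm_3e().
 */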
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;
    if ((dfn & 0x30) == 0x00) {
    } else if ((dfn & 0x38) == 0x20) {
    } else if ((dfn & 0x3c) == 0x30) {
    } else if ((dfn & 0x3e) == 0x38) {
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        generate_exception_end(ctx, EXCP_RI);
    gen_msa_elm_df(env, ctx, df, n);
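/*
 * In the 3RF format the single df bit selects word (0) or doubleword (1)
 * elements.  Floating-point helpers are called with df + 2
 * (DF_WORD/DF_DOUBLE), while the fixed-point Q-format ops reload tdf with
 * df + 1 (DF_HALF/DF_WORD) before calling their helpers.
 */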
18623 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18625 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18626 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18627 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18628 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18629 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18631 TCGv_i32 twd
= tcg_const_i32(wd
);
18632 TCGv_i32 tws
= tcg_const_i32(ws
);
18633 TCGv_i32 twt
= tcg_const_i32(wt
);
18634 TCGv_i32 tdf
= tcg_temp_new_i32();
18636 /* adjust df value for floating-point instruction */
18637 tcg_gen_movi_i32(tdf
, df
+ 2);
18639 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18641 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18644 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18647 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18650 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18653 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18656 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18659 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18662 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18665 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18668 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18671 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18674 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18677 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18680 tcg_gen_movi_i32(tdf
, df
+ 1);
18681 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18684 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18687 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18689 case OPC_MADD_Q_df
:
18690 tcg_gen_movi_i32(tdf
, df
+ 1);
18691 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18694 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18696 case OPC_MSUB_Q_df
:
18697 tcg_gen_movi_i32(tdf
, df
+ 1);
18698 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18701 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18704 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18707 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18710 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18713 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18716 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18719 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18722 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18725 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18728 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18731 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18734 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18737 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18739 case OPC_MULR_Q_df
:
18740 tcg_gen_movi_i32(tdf
, df
+ 1);
18741 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18744 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18746 case OPC_FMIN_A_df
:
18747 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18749 case OPC_MADDR_Q_df
:
18750 tcg_gen_movi_i32(tdf
, df
+ 1);
18751 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18754 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18757 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18759 case OPC_MSUBR_Q_df
:
18760 tcg_gen_movi_i32(tdf
, df
+ 1);
18761 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18764 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18766 case OPC_FMAX_A_df
:
18767 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18770 MIPS_INVAL("MSA instruction");
18771 generate_exception_end(ctx
, EXCP_RI
);
18775 tcg_temp_free_i32(twd
);
18776 tcg_temp_free_i32(tws
);
18777 tcg_temp_free_i32(twt
);
18778 tcg_temp_free_i32(tdf
);
18781 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18783 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18784 (op & (0x7 << 18)))
18785 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18786 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18787 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18788 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18789 TCGv_i32 twd
= tcg_const_i32(wd
);
18790 TCGv_i32 tws
= tcg_const_i32(ws
);
18791 TCGv_i32 twt
= tcg_const_i32(wt
);
18792 TCGv_i32 tdf
= tcg_const_i32(df
);
18794 switch (MASK_MSA_2R(ctx
->opcode
)) {
18796 #if !defined(TARGET_MIPS64)
18797 /* Double format valid only for MIPS64 */
18798 if (df
== DF_DOUBLE
) {
18799 generate_exception_end(ctx
, EXCP_RI
);
18803 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18806 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18809 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18812 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18815 MIPS_INVAL("MSA instruction");
18816 generate_exception_end(ctx
, EXCP_RI
);
18820 tcg_temp_free_i32(twd
);
18821 tcg_temp_free_i32(tws
);
18822 tcg_temp_free_i32(twt
);
18823 tcg_temp_free_i32(tdf
);
18826 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18828 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18829 (op & (0xf << 17)))
18830 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18831 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18832 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18833 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18834 TCGv_i32 twd
= tcg_const_i32(wd
);
18835 TCGv_i32 tws
= tcg_const_i32(ws
);
18836 TCGv_i32 twt
= tcg_const_i32(wt
);
18837 /* adjust df value for floating-point instruction */
18838 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18840 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18841 case OPC_FCLASS_df
:
18842 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18844 case OPC_FTRUNC_S_df
:
18845 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18847 case OPC_FTRUNC_U_df
:
18848 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18851 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18853 case OPC_FRSQRT_df
:
18854 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18857 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18860 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18863 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18865 case OPC_FEXUPL_df
:
18866 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18868 case OPC_FEXUPR_df
:
18869 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18872 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18875 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18877 case OPC_FTINT_S_df
:
18878 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18880 case OPC_FTINT_U_df
:
18881 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18883 case OPC_FFINT_S_df
:
18884 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18886 case OPC_FFINT_U_df
:
18887 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18891 tcg_temp_free_i32(twd
);
18892 tcg_temp_free_i32(tws
);
18893 tcg_temp_free_i32(twt
);
18894 tcg_temp_free_i32(tdf
);
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_msa_vec_v(env, ctx);
        gen_msa_2r(env, ctx);
        gen_msa_2rf(env, ctx);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
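/*
 * gen_msa() dispatches on the MSA minor opcode to the I8/I5/BIT/3R/ELM/
 * 3RF/VEC groups above.  For the vector load/store cases the signed 10-bit
 * offset s10 is scaled by the element size (s10 << df) before
 * gen_base_offset_addr() forms the effective address for the ld/st helpers.
 */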
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);
    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        gen_msa_elm(env, ctx);
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        gen_msa_vec(env, ctx);
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;
            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);
            switch (MASK_MSA_MINOR(opcode)) {
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                gen_helper_msa_st_d(cpu_env, twd, taddr);
            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
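/*
 * decode_opc() is the top-level decoder for 32-bit wide instructions.  It
 * first checks word alignment (recording CP0_BadVAddr and raising AdEL on
 * a misaligned PC), then resolves a pending branch-likely not-taken case
 * by skipping the delay slot, and finally dispatches on MASK_OP_MAJOR()
 * to the SPECIAL/SPECIAL2/SPECIAL3/REGIMM/CP0 and other sub-decoders.
 */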
19064 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19067 int rs
, rt
, rd
, sa
;
19071 /* make sure instructions are on a word boundary */
19072 if (ctx
->pc
& 0x3) {
19073 env
->CP0_BadVAddr
= ctx
->pc
;
19074 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
    /* Handle the branch-likely not-taken case */
19079 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19080 TCGLabel
*l1
= gen_new_label();
19082 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19083 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19084 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19088 op
= MASK_OP_MAJOR(ctx
->opcode
);
19089 rs
= (ctx
->opcode
>> 21) & 0x1f;
19090 rt
= (ctx
->opcode
>> 16) & 0x1f;
19091 rd
= (ctx
->opcode
>> 11) & 0x1f;
19092 sa
= (ctx
->opcode
>> 6) & 0x1f;
19093 imm
= (int16_t)ctx
->opcode
;
19096 decode_opc_special(env
, ctx
);
19099 decode_opc_special2_legacy(env
, ctx
);
19102 decode_opc_special3(env
, ctx
);
19105 op1
= MASK_REGIMM(ctx
->opcode
);
19107 case OPC_BLTZL
: /* REGIMM branches */
19111 check_insn(ctx
, ISA_MIPS2
);
19112 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19116 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19120 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19122 /* OPC_NAL, OPC_BAL */
19123 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19125 generate_exception_end(ctx
, EXCP_RI
);
19128 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19131 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19133 check_insn(ctx
, ISA_MIPS2
);
19134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19135 gen_trap(ctx
, op1
, rs
, -1, imm
);
19138 check_insn(ctx
, ISA_MIPS32R6
);
19139 generate_exception_end(ctx
, EXCP_RI
);
19142 check_insn(ctx
, ISA_MIPS32R2
);
        /* Break the TB to be able to sync copied instructions. */
        ctx->bstate = BS_STOP;
19147 case OPC_BPOSGE32
: /* MIPS DSP branch */
19148 #if defined(TARGET_MIPS64)
19152 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19154 #if defined(TARGET_MIPS64)
19156 check_insn(ctx
, ISA_MIPS32R6
);
19157 check_mips_64(ctx
);
19159 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19163 check_insn(ctx
, ISA_MIPS32R6
);
19164 check_mips_64(ctx
);
19166 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19170 default: /* Invalid */
19171 MIPS_INVAL("regimm");
19172 generate_exception_end(ctx
, EXCP_RI
);
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                default: /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            MIPS_INVAL("cp0");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;
    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;
    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_cop1x(ctx);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_cop1x(ctx);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
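    /*
     * With rs != 0 these encodings are BEQZC/BNEZC with a 21-bit offset;
     * with rs == 0 they become JIC/JIALC, which add a plain 16-bit offset
     * to rt instead.
     */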
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;
    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
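/*
 * Translate a block of guest instructions.  Decoding continues until the
 * translation state machine leaves BS_NONE (branch, exception, mode switch),
 * or until the TB reaches the next page boundary, the cflags instruction
 * budget, or a full TCG op buffer.
 */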
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
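    /* R6 requires hardware support for unaligned scalar memory accesses,
       so only pre-R6 cores get MO_ALIGN on the generated loads/stores. */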
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;
        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order to for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
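        /*
         * MIPS_HFLAG_M16 marks the compressed instruction stream; whether
         * that means microMIPS or MIPS16 is decided by the CPU's insn_flags.
         * Plain 32-bit opcodes are fetched with cpu_ldl_code() and handled
         * by decode_opc().
         */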
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }
        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }
        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)
    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
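    /* With a 32-bit FPU (FR=0) the odd-numbered registers only hold the high
       halves of doubles, so step by two and let printfpr() combine each
       even/odd pair. */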
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;
    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
void mips_tcg_init(void)
{
    int i;

    /* Initialize various static tables. */
    if (inited)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);
= 0; i
< 32; i
++) {
20004 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
20006 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
20007 /* The scalar floating-point unit (FPU) registers are mapped on
20008 * the MSA vector registers. */
20009 fpu_f64
[i
] = msa_wr_d
[i
* 2];
20010 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
20011 msa_wr_d
[i
* 2 + 1] =
20012 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");

    inited = 1;
}

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}
bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
->PABITS
= env
->cpu_model
->PABITS
;
20118 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20119 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20120 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20121 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20122 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20123 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20124 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20125 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20126 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20127 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20128 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20129 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20130 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20131 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20132 env
->msair
= env
->cpu_model
->MSAIR
;
20133 env
->insn_flags
= env
->cpu_model
->insn_flags
;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
20142 /* Enable 64-bit address mode. */
20143 env
->CP0_Status
|= (1 << CP0St_UX
);
20145 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20146 hardware registers. */
20147 env
->CP0_HWREna
|= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
20155 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20156 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20157 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20158 env
->CP0_Status
|= (1 << CP0St_FR
);
20162 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20163 /* If the exception was raised from a delay slot,
20164 come back to the jump. */
20165 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20166 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20168 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
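    /* EBase.CPUNum is seeded from the QEMU cpu_index; the exception base
       itself is placed in kseg0 for TCG and in useg when running under KVM. */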
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }
#endif
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }
    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;
    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}