/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"
#include "exec/log.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
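/*
 * Illustrative note (not from the original source): MASK_OP_MAJOR() keeps
 * only the top 6 bits of an instruction word, which is how the decoder
 * classifies it.  For example, 0x24420001 (addiu v0, v0, 1) has
 * (insn >> 26) == 0x09, so MASK_OP_MAJOR(insn) == OPC_ADDIU below.
 */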
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
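/*
 * Illustrative note (not from the original source): several of the compact
 * branch encodings above (e.g. OPC_BLEZALC/OPC_BGEZALC/OPC_BGEUC) share a
 * major opcode with a legacy branch; the decoder tells them apart later
 * using the rs/rt register fields, not the bits covered by MASK_OP_MAJOR().
 */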
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)            MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA       = 0x05 | OPC_SPECIAL,
    OPC_DLSA      = 0x15 | OPC_SPECIAL,
};
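/*
 * Illustrative note (not from the original source): the R6 MUL/MUH/DIV/MOD
 * family reuses the legacy MULT/MULTU/DIV/DIVU function codes and is
 * distinguished only by the value placed in bits 6..10, which is why
 * MASK_R6_MULDIV() above also folds in (op & 0x7ff).
 */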
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP      = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP   = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP   = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP      = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP      = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBINV   = 0x03 | OPC_C0,
    OPC_TLBINVF  = 0x04 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,          /* single fp */
    FMT_D = 17,          /* double fp */
    FMT_E = 18,          /* extended fp */
    FMT_Q = 19,          /* quad fp */
    FMT_W = 20,          /* 32-bit fixed */
    FMT_L = 21,          /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V     = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V    = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ   = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ   = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B     = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H     = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W     = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D     = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B    = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H    = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W    = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D    = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)       MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)            MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2    = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2   = (0x01 << 21) | OPC_CP2,
    OPC_CFC2    = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2   = (0x03 << 21) | OPC_CP2,
    OPC_MTC2    = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2   = (0x05 << 21) | OPC_CP2,
    OPC_CTC2    = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2   = (0x07 << 21) | OPC_CP2,
    OPC_BC2     = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ  = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ  = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH     = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH    = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH      = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW      = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB     = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB    = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB      = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD      = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH     = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH    = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH      = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW      = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB     = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB    = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB      = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD      = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH     = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH   = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB   = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB   = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2    = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2    = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2    = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN      = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW  = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW  = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH  = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH  = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0   = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1   = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2   = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3   = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH      = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB      = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH     = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH     = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB     = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB     = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW    = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW    = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH    = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH    = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB    = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB    = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW      = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH      = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH     = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH     = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW     = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH    = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW      = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH      = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW      = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH      = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD  = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD  = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2   = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2     = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2    = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2   = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2   = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2    = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2   = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB    = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2    = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2   = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2   = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2    = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2    = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2   = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH     = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW    = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2   = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2    = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2    = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2   = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2    = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2   = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD      = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB   = (29 << 21) | (0x0F) | OPC_CP2,
};
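/*
 * Illustrative note (not from the original source): the OPC_P* / *_CP2
 * constants above are Loongson multimedia instructions that live in the
 * COP2 major opcode; MASK_LMI() therefore has to look at both the rs field
 * (bits 21..25) and the low 5 bits of the function field to identify them.
 */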
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op)    (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00   = 0x00 | OPC_MSA,
    OPC_MSA_I8_01   = 0x01 | OPC_MSA,
    OPC_MSA_I8_02   = 0x02 | OPC_MSA,
    OPC_MSA_I5_06   = 0x06 | OPC_MSA,
    OPC_MSA_I5_07   = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09  = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A  = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D   = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E   = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F   = 0x0F | OPC_MSA,
    OPC_MSA_3R_10   = 0x10 | OPC_MSA,
    OPC_MSA_3R_11   = 0x11 | OPC_MSA,
    OPC_MSA_3R_12   = 0x12 | OPC_MSA,
    OPC_MSA_3R_13   = 0x13 | OPC_MSA,
    OPC_MSA_3R_14   = 0x14 | OPC_MSA,
    OPC_MSA_3R_15   = 0x15 | OPC_MSA,
    OPC_MSA_ELM     = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A  = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B  = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C  = 0x1C | OPC_MSA,
    OPC_MSA_VEC     = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B        = (0x20) | OPC_MSA,
    OPC_LD_H        = (0x21) | OPC_MSA,
    OPC_LD_W        = (0x22) | OPC_MSA,
    OPC_LD_D        = (0x23) | OPC_MSA,
    OPC_ST_B        = (0x24) | OPC_MSA,
    OPC_ST_H        = (0x25) | OPC_MSA,
    OPC_ST_W        = (0x26) | OPC_MSA,
    OPC_ST_D        = (0x27) | OPC_MSA,
};
enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df    = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df     = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df    = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df   = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df   = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df   = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df   = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df   = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df   = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df   = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df   = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df      = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B      = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B     = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B       = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B       = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B      = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H       = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B      = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B     = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W       = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B      = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V       = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V        = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V       = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V       = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V      = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V       = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V      = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R      = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF     = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df     = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df     = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df     = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df     = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df     = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA      = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df   = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA      = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df   = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V      = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df   = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df   = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df    = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df     = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df     = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df     = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df     = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df     = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df     = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df     = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df    = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df    = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df     = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df     = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df     = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df    = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df    = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df    = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df    = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df   = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df     = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df   = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df    = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df    = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df     = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df    = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df     = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df     = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df     = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df      = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df    = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df    = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df     = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df     = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df     = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df   = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df    = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df   = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df  = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df     = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df     = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df  = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df    = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df   = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df     = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df    = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df     = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df    = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df     = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df    = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df    = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df    = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df    = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df    = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df   = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df   = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
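/*
 * Illustrative note (not from the original source): these TCG globals mirror
 * the guest-visible MIPS state (GPRs, PC, HI/LO and DSP accumulators, FPU and
 * MSA registers) and are created once at translator initialisation; the code
 * generators below read and write them instead of raw CPUMIPSState offsets.
 */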

#include "exec/gen-icount.h"

#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
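/*
 * Illustrative note (not from the original source): the gen_helper_*e*i
 * wrappers above exist for helpers whose trailing argument is a compile-time
 * immediate.  Each one boxes the immediate into a temporary TCGv_i32, passes
 * it (together with cpu_env) to the real helper, and frees the temporary
 * again, e.g. gen_helper_0e0i(raise_exception, excp).
 */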
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...) \
    if (MIPS_DEBUG_DISAS) { \
        qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
    }

#define MIPS_INVAL(op) \
    if (MIPS_DEBUG_DISAS) { \
        qemu_log_mask(CPU_LOG_TB_IN_ASM, \
                      TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                      ctx->pc, ctx->opcode, op, ctx->opcode >> 26, \
                      ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
    }
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
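/* MIPS r0 ($zero) always reads as 0 and discards writes, which is why the
 * helpers above special-case register number 0 rather than touching
 * cpu_gpr[0]. */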
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
    TCGv t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, 0);

    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_ptr addr = tcg_temp_new_ptr();

    tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
    tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
    tcg_gen_andi_i32(t2, t2, 0xf);
    tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
    tcg_gen_ext_i32_ptr(addr, t2);
    tcg_gen_add_ptr(addr, cpu_env, addr);

    tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
    tcg_temp_free_ptr(addr);
    tcg_temp_free_i32(t2);

    gen_store_gpr(t0, to);

static inline void gen_store_srsgpr (int from, int to)
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_ptr addr = tcg_temp_new_ptr();

    gen_load_gpr(t0, from);
    tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
    tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
    tcg_gen_andi_i32(t2, t2, 0xf);
    tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
    tcg_gen_ext_i32_ptr(addr, t2);
    tcg_gen_add_ptr(addr, cpu_env, addr);

    tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
    tcg_temp_free_ptr(addr);
    tcg_temp_free_i32(t2);
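/* Both shadow-register helpers index the bank selected by the PSS field of
 * CP0_SRSCtl: the 4-bit field is scaled by 32 * sizeof(target_ulong), added
 * to cpu_env, and the individual register is then accessed at its own offset
 * within that bank. */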
static inline void gen_save_pc(target_ulong pc)
    tcg_gen_movi_tl(cpu_PC, pc);

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
            tcg_gen_movi_tl(btarget, ctx->btarget);
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        ctx->btarget = env->btarget;
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
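/* generate_exception_err() synchronises PC and hflags via save_cpu_state()
 * before calling the raise_exception_err helper, and marks the block with
 * BS_EXCP so translation of the current TB stops at the exception. */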
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
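/* With Status.FR = 0 (no MIPS_HFLAG_F64) the 64-bit FPRs are modelled as
 * even/odd pairs of 32-bit halves, so the 64-bit accessors above split or
 * concatenate registers reg & ~1 and reg | 1; with FR = 1 each fpu_f64[reg]
 * holds a full 64-bit value. */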
static inline int get_fp_bit (int cc)

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {

/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
    tcg_gen_extrl_i64_i32(ret, arg);

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
    tcg_gen_extrh_i64_i32(ret, arg);
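/* MIPS_HFLAG_AWRAP marks modes where 64-bit address arithmetic must wrap to a
 * sign-extended 32-bit value; gen_op_addr_add() applies the wrap at run time
 * and addr_add() is its translation-time counterpart. */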
static inline void check_cp0_enabled(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);

static inline void check_cp1_enabled(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);

/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
            generate_exception_end(ctx, EXCP_RI);

static inline void check_dspr2(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
            generate_exception_end(ctx, EXCP_RI);

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    check_cp1_64bitmode(ctx);

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
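/* All of the check_* helpers above follow the same pattern: test a capability
 * bit in ctx->hflags or ctx->insn_flags and raise a coprocessor-unusable or
 * reserved-instruction exception when the required feature is not available. */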
1870 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1871 calling interface for 32 and 64-bit FPRs. No sense in changing
1872 all callers for gen_load_fpr32 when we need the CTX parameter for
1874 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1875 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1876 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1877 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1878 int ft, int fs, int cc) \
1880 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1881 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1890 check_cp1_registers(ctx, fs | ft); \
1898 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1899 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1901 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1902 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1903 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1904 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1905 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1906 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1907 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1908 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1909 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1910 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1911 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1912 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1913 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1914 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1915 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1916 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1919 tcg_temp_free_i##bits (fp0); \
1920 tcg_temp_free_i##bits (fp1); \
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
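/* These instantiations expand to gen_cmp_d/gen_cmpabs_d, gen_cmp_s/gen_cmpabs_s
 * and gen_cmp_ps/gen_cmpabs_ps, one emitter per FP format for the pre-R6
 * C.cond.fmt comparisons; the condition number selects which cmp helper is
 * invoked. */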
1931 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1932 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1933 int ft, int fs, int fd) \
1935 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1936 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1937 if (ifmt == FMT_D) { \
1938 check_cp1_registers(ctx, fs | ft | fd); \
1940 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1941 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1944 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1947 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1950 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1953 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1956 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1959 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1962 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1965 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1968 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1971 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1974 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1977 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1980 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1983 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1986 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1989 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1992 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1995 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1998 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2001 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2004 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2007 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2013 tcg_temp_free_i ## bits (fp0); \
2014 tcg_temp_free_i ## bits (fp1); \
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
2020 #undef gen_ldcmp_fpr32
2021 #undef gen_ldcmp_fpr64
2023 /* load/store instructions. */
2024 #ifdef CONFIG_USER_ONLY
2025 #define OP_LD_ATOMIC(insn,fname) \
2026 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2028 TCGv t0 = tcg_temp_new(); \
2029 tcg_gen_mov_tl(t0, arg1); \
2030 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2031 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2032 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2033 tcg_temp_free(t0); \
2036 #define OP_LD_ATOMIC(insn,fname) \
2037 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2039 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
2048 #ifdef CONFIG_USER_ONLY
2049 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2050 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2052 TCGv t0 = tcg_temp_new(); \
2053 TCGLabel *l1 = gen_new_label(); \
2054 TCGLabel *l2 = gen_new_label(); \
2056 tcg_gen_andi_tl(t0, arg2, almask); \
2057 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2058 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2059 generate_exception(ctx, EXCP_AdES); \
2060 gen_set_label(l1); \
2061 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2062 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2063 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2064 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2065 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2066 generate_exception_end(ctx, EXCP_SC); \
2067 gen_set_label(l2); \
2068 tcg_gen_movi_tl(t0, 0); \
2069 gen_store_gpr(t0, rt); \
2070 tcg_temp_free(t0); \
2073 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2074 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2076 TCGv t0 = tcg_temp_new(); \
2077 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2078 gen_store_gpr(t0, rt); \
2079 tcg_temp_free(t0); \
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);

static target_ulong pc_relative_pc (DisasContext *ctx)
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

    pc &= ~(target_ulong)3;
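/* pc_relative_pc() computes the base address used by PC-relative loads: if the
 * current instruction is in a branch delay slot the base is adjusted by the
 * branch size (2 or 4 bytes depending on MIPS_HFLAG_BDS16), and the result is
 * word-aligned by clearing the low two bits. */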
2116 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2117 int rt
, int base
, int16_t offset
)
2121 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2122 /* Loongson CPU uses a load to zero register for prefetch.
2123 We emulate it as a NOP. On other CPU we must perform the
2124 actual memory access. */
2128 t0
= tcg_temp_new();
2129 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2132 #if defined(TARGET_MIPS64)
2134 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2135 ctx
->default_tcg_memop_mask
);
2136 gen_store_gpr(t0
, rt
);
2139 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2140 ctx
->default_tcg_memop_mask
);
2141 gen_store_gpr(t0
, rt
);
2145 op_ld_lld(t0
, t0
, ctx
);
2146 gen_store_gpr(t0
, rt
);
2149 t1
= tcg_temp_new();
2150 /* Do a byte access to possibly trigger a page
2151 fault with the unaligned address. */
2152 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2153 tcg_gen_andi_tl(t1
, t0
, 7);
2154 #ifndef TARGET_WORDS_BIGENDIAN
2155 tcg_gen_xori_tl(t1
, t1
, 7);
2157 tcg_gen_shli_tl(t1
, t1
, 3);
2158 tcg_gen_andi_tl(t0
, t0
, ~7);
2159 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2160 tcg_gen_shl_tl(t0
, t0
, t1
);
2161 t2
= tcg_const_tl(-1);
2162 tcg_gen_shl_tl(t2
, t2
, t1
);
2163 gen_load_gpr(t1
, rt
);
2164 tcg_gen_andc_tl(t1
, t1
, t2
);
2166 tcg_gen_or_tl(t0
, t0
, t1
);
2168 gen_store_gpr(t0
, rt
);
2171 t1
= tcg_temp_new();
2172 /* Do a byte access to possibly trigger a page
2173 fault with the unaligned address. */
2174 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2175 tcg_gen_andi_tl(t1
, t0
, 7);
2176 #ifdef TARGET_WORDS_BIGENDIAN
2177 tcg_gen_xori_tl(t1
, t1
, 7);
2179 tcg_gen_shli_tl(t1
, t1
, 3);
2180 tcg_gen_andi_tl(t0
, t0
, ~7);
2181 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2182 tcg_gen_shr_tl(t0
, t0
, t1
);
2183 tcg_gen_xori_tl(t1
, t1
, 63);
2184 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2185 tcg_gen_shl_tl(t2
, t2
, t1
);
2186 gen_load_gpr(t1
, rt
);
2187 tcg_gen_and_tl(t1
, t1
, t2
);
2189 tcg_gen_or_tl(t0
, t0
, t1
);
2191 gen_store_gpr(t0
, rt
);
2194 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2195 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2197 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2198 gen_store_gpr(t0
, rt
);
2202 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2203 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2205 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2206 gen_store_gpr(t0
, rt
);
2209 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2210 ctx
->default_tcg_memop_mask
);
2211 gen_store_gpr(t0
, rt
);
2214 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2215 ctx
->default_tcg_memop_mask
);
2216 gen_store_gpr(t0
, rt
);
2219 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2220 ctx
->default_tcg_memop_mask
);
2221 gen_store_gpr(t0
, rt
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2225 gen_store_gpr(t0
, rt
);
2228 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2229 gen_store_gpr(t0
, rt
);
2232 t1
= tcg_temp_new();
2233 /* Do a byte access to possibly trigger a page
2234 fault with the unaligned address. */
2235 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2236 tcg_gen_andi_tl(t1
, t0
, 3);
2237 #ifndef TARGET_WORDS_BIGENDIAN
2238 tcg_gen_xori_tl(t1
, t1
, 3);
2240 tcg_gen_shli_tl(t1
, t1
, 3);
2241 tcg_gen_andi_tl(t0
, t0
, ~3);
2242 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2243 tcg_gen_shl_tl(t0
, t0
, t1
);
2244 t2
= tcg_const_tl(-1);
2245 tcg_gen_shl_tl(t2
, t2
, t1
);
2246 gen_load_gpr(t1
, rt
);
2247 tcg_gen_andc_tl(t1
, t1
, t2
);
2249 tcg_gen_or_tl(t0
, t0
, t1
);
2251 tcg_gen_ext32s_tl(t0
, t0
);
2252 gen_store_gpr(t0
, rt
);
2255 t1
= tcg_temp_new();
2256 /* Do a byte access to possibly trigger a page
2257 fault with the unaligned address. */
2258 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2259 tcg_gen_andi_tl(t1
, t0
, 3);
2260 #ifdef TARGET_WORDS_BIGENDIAN
2261 tcg_gen_xori_tl(t1
, t1
, 3);
2263 tcg_gen_shli_tl(t1
, t1
, 3);
2264 tcg_gen_andi_tl(t0
, t0
, ~3);
2265 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2266 tcg_gen_shr_tl(t0
, t0
, t1
);
2267 tcg_gen_xori_tl(t1
, t1
, 31);
2268 t2
= tcg_const_tl(0xfffffffeull
);
2269 tcg_gen_shl_tl(t2
, t2
, t1
);
2270 gen_load_gpr(t1
, rt
);
2271 tcg_gen_and_tl(t1
, t1
, t2
);
2273 tcg_gen_or_tl(t0
, t0
, t1
);
2275 tcg_gen_ext32s_tl(t0
, t0
);
2276 gen_store_gpr(t0
, rt
);
2280 op_ld_ll(t0
, t0
, ctx
);
2281 gen_store_gpr(t0
, rt
);
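/* The LWL/LWR and LDL/LDR cases above emulate the unaligned-access pairs in
 * three steps: a byte load first so that any page fault is reported for the
 * unaligned address, then an aligned word/doubleword load, and finally a
 * shift-and-mask merge of the loaded part with the existing contents of rt. */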
2288 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2289 int base
, int16_t offset
)
2291 TCGv t0
= tcg_temp_new();
2292 TCGv t1
= tcg_temp_new();
2294 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2295 gen_load_gpr(t1
, rt
);
2297 #if defined(TARGET_MIPS64)
2299 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2300 ctx
->default_tcg_memop_mask
);
2303 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2306 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2310 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2311 ctx
->default_tcg_memop_mask
);
2314 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2315 ctx
->default_tcg_memop_mask
);
2318 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2321 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2324 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
#ifdef CONFIG_USER_ONLY
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
#else
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
#endif
    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
#if defined(TARGET_MIPS64)
        op_st_scd(t1, t0, rt, ctx);
        op_st_sc(t1, t0, rt, ctx);
2363 /* Load and store */
2364 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2365 int base
, int16_t offset
)
2367 TCGv t0
= tcg_temp_new();
2369 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2370 /* Don't do NOP if destination is zero: we must perform the actual
2375 TCGv_i32 fp0
= tcg_temp_new_i32();
2376 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2377 ctx
->default_tcg_memop_mask
);
2378 gen_store_fpr32(ctx
, fp0
, ft
);
2379 tcg_temp_free_i32(fp0
);
2384 TCGv_i32 fp0
= tcg_temp_new_i32();
2385 gen_load_fpr32(ctx
, fp0
, ft
);
2386 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2387 ctx
->default_tcg_memop_mask
);
2388 tcg_temp_free_i32(fp0
);
2393 TCGv_i64 fp0
= tcg_temp_new_i64();
2394 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2395 ctx
->default_tcg_memop_mask
);
2396 gen_store_fpr64(ctx
, fp0
, ft
);
2397 tcg_temp_free_i64(fp0
);
2402 TCGv_i64 fp0
= tcg_temp_new_i64();
2403 gen_load_fpr64(ctx
, fp0
, ft
);
2404 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2405 ctx
->default_tcg_memop_mask
);
2406 tcg_temp_free_i64(fp0
);
2410 MIPS_INVAL("flt_ldst");
2411 generate_exception_end(ctx
, EXCP_RI
);
2418 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2419 int rs
, int16_t imm
)
2421 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2422 check_cp1_enabled(ctx
);
2426 check_insn(ctx
, ISA_MIPS2
);
2429 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2432 generate_exception_err(ctx
, EXCP_CpU
, 1);
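/* gen_cop1_ldst() only emits an FPU load/store when CP0_Config1 reports an
 * FPU (CP0C1_FP) and CP1 is enabled; otherwise it raises a coprocessor 1
 * unusable exception. */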
2436 /* Arithmetic with immediate operand */
2437 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2438 int rt
, int rs
, int16_t imm
)
2440 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2442 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2443 /* If no destination, treat it as a NOP.
2444 For addi, we must generate the overflow exception when needed. */
2450 TCGv t0
= tcg_temp_local_new();
2451 TCGv t1
= tcg_temp_new();
2452 TCGv t2
= tcg_temp_new();
2453 TCGLabel
*l1
= gen_new_label();
2455 gen_load_gpr(t1
, rs
);
2456 tcg_gen_addi_tl(t0
, t1
, uimm
);
2457 tcg_gen_ext32s_tl(t0
, t0
);
2459 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2460 tcg_gen_xori_tl(t2
, t0
, uimm
);
2461 tcg_gen_and_tl(t1
, t1
, t2
);
2463 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2465 /* operands of same sign, result different sign */
2466 generate_exception(ctx
, EXCP_OVERFLOW
);
2468 tcg_gen_ext32s_tl(t0
, t0
);
2469 gen_store_gpr(t0
, rt
);
2475 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2476 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2478 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2481 #if defined(TARGET_MIPS64)
2484 TCGv t0
= tcg_temp_local_new();
2485 TCGv t1
= tcg_temp_new();
2486 TCGv t2
= tcg_temp_new();
2487 TCGLabel
*l1
= gen_new_label();
2489 gen_load_gpr(t1
, rs
);
2490 tcg_gen_addi_tl(t0
, t1
, uimm
);
2492 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2493 tcg_gen_xori_tl(t2
, t0
, uimm
);
2494 tcg_gen_and_tl(t1
, t1
, t2
);
2496 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2498 /* operands of same sign, result different sign */
2499 generate_exception(ctx
, EXCP_OVERFLOW
);
2501 gen_store_gpr(t0
, rt
);
2507 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2509 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
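/* The ADDI/DADDI paths above detect signed overflow without branching on the
 * operands themselves: the sign bits of (rs ^ ~imm) and (result ^ imm) are
 * combined, and only when both operands share a sign that the result does not
 * is the overflow exception raised; otherwise the sum is written to rt. */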
2516 /* Logic with immediate operand */
2517 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2518 int rt
, int rs
, int16_t imm
)
2523 /* If no destination, treat it as a NOP. */
2526 uimm
= (uint16_t)imm
;
2529 if (likely(rs
!= 0))
2530 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2532 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2536 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2541 if (likely(rs
!= 0))
2542 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2544 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2547 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2549 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2550 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2552 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2561 /* Set on less than with immediate operand */
2562 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2563 int rt
, int rs
, int16_t imm
)
2565 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2569 /* If no destination, treat it as a NOP. */
2572 t0
= tcg_temp_new();
2573 gen_load_gpr(t0
, rs
);
2576 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2579 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2585 /* Shifts with immediate operand */
2586 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2587 int rt
, int rs
, int16_t imm
)
2589 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2593 /* If no destination, treat it as a NOP. */
2597 t0
= tcg_temp_new();
2598 gen_load_gpr(t0
, rs
);
2601 tcg_gen_shli_tl(t0
, t0
, uimm
);
2602 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2605 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2609 tcg_gen_ext32u_tl(t0
, t0
);
2610 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2612 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2617 TCGv_i32 t1
= tcg_temp_new_i32();
2619 tcg_gen_trunc_tl_i32(t1
, t0
);
2620 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2621 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2622 tcg_temp_free_i32(t1
);
2624 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2627 #if defined(TARGET_MIPS64)
2629 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2632 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2639 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2645 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2648 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2651 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2654 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2662 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2663 int rd
, int rs
, int rt
)
2665 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2666 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2667 /* If no destination, treat it as a NOP.
2668 For add & sub, we must generate the overflow exception when needed. */
2675 TCGv t0
= tcg_temp_local_new();
2676 TCGv t1
= tcg_temp_new();
2677 TCGv t2
= tcg_temp_new();
2678 TCGLabel
*l1
= gen_new_label();
2680 gen_load_gpr(t1
, rs
);
2681 gen_load_gpr(t2
, rt
);
2682 tcg_gen_add_tl(t0
, t1
, t2
);
2683 tcg_gen_ext32s_tl(t0
, t0
);
2684 tcg_gen_xor_tl(t1
, t1
, t2
);
2685 tcg_gen_xor_tl(t2
, t0
, t2
);
2686 tcg_gen_andc_tl(t1
, t2
, t1
);
2688 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2690 /* operands of same sign, result different sign */
2691 generate_exception(ctx
, EXCP_OVERFLOW
);
2693 gen_store_gpr(t0
, rd
);
2698 if (rs
!= 0 && rt
!= 0) {
2699 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2700 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2701 } else if (rs
== 0 && rt
!= 0) {
2702 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2703 } else if (rs
!= 0 && rt
== 0) {
2704 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2706 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2711 TCGv t0
= tcg_temp_local_new();
2712 TCGv t1
= tcg_temp_new();
2713 TCGv t2
= tcg_temp_new();
2714 TCGLabel
*l1
= gen_new_label();
2716 gen_load_gpr(t1
, rs
);
2717 gen_load_gpr(t2
, rt
);
2718 tcg_gen_sub_tl(t0
, t1
, t2
);
2719 tcg_gen_ext32s_tl(t0
, t0
);
2720 tcg_gen_xor_tl(t2
, t1
, t2
);
2721 tcg_gen_xor_tl(t1
, t0
, t1
);
2722 tcg_gen_and_tl(t1
, t1
, t2
);
2724 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2726 /* operands of different sign, first operand and result different sign */
2727 generate_exception(ctx
, EXCP_OVERFLOW
);
2729 gen_store_gpr(t0
, rd
);
2734 if (rs
!= 0 && rt
!= 0) {
2735 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2736 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2737 } else if (rs
== 0 && rt
!= 0) {
2738 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2739 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2740 } else if (rs
!= 0 && rt
== 0) {
2741 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2743 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2746 #if defined(TARGET_MIPS64)
2749 TCGv t0
= tcg_temp_local_new();
2750 TCGv t1
= tcg_temp_new();
2751 TCGv t2
= tcg_temp_new();
2752 TCGLabel
*l1
= gen_new_label();
2754 gen_load_gpr(t1
, rs
);
2755 gen_load_gpr(t2
, rt
);
2756 tcg_gen_add_tl(t0
, t1
, t2
);
2757 tcg_gen_xor_tl(t1
, t1
, t2
);
2758 tcg_gen_xor_tl(t2
, t0
, t2
);
2759 tcg_gen_andc_tl(t1
, t2
, t1
);
2761 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2763 /* operands of same sign, result different sign */
2764 generate_exception(ctx
, EXCP_OVERFLOW
);
2766 gen_store_gpr(t0
, rd
);
2771 if (rs
!= 0 && rt
!= 0) {
2772 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2773 } else if (rs
== 0 && rt
!= 0) {
2774 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2775 } else if (rs
!= 0 && rt
== 0) {
2776 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2778 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2783 TCGv t0
= tcg_temp_local_new();
2784 TCGv t1
= tcg_temp_new();
2785 TCGv t2
= tcg_temp_new();
2786 TCGLabel
*l1
= gen_new_label();
2788 gen_load_gpr(t1
, rs
);
2789 gen_load_gpr(t2
, rt
);
2790 tcg_gen_sub_tl(t0
, t1
, t2
);
2791 tcg_gen_xor_tl(t2
, t1
, t2
);
2792 tcg_gen_xor_tl(t1
, t0
, t1
);
2793 tcg_gen_and_tl(t1
, t1
, t2
);
2795 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2797 /* operands of different sign, first operand and result different sign */
2798 generate_exception(ctx
, EXCP_OVERFLOW
);
2800 gen_store_gpr(t0
, rd
);
2805 if (rs
!= 0 && rt
!= 0) {
2806 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2807 } else if (rs
== 0 && rt
!= 0) {
2808 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2809 } else if (rs
!= 0 && rt
== 0) {
2810 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2812 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2817 if (likely(rs
!= 0 && rt
!= 0)) {
2818 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2819 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
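/* gen_arith() mirrors gen_arith_imm(): ADD/SUB/DADD/DSUB go through the
 * overflow-checking sequences above, while the non-trapping forms and MUL
 * replace the operation with a move or a constant whenever one operand is
 * the zero register. */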
/* Conditional move */
static void gen_cond_move(DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    t1 = tcg_const_tl(0);
    t2 = tcg_temp_new();
    gen_load_gpr(t2, rs);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
2863 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2864 int rd
, int rs
, int rt
)
2867 /* If no destination, treat it as a NOP. */
2873 if (likely(rs
!= 0 && rt
!= 0)) {
2874 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2876 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2880 if (rs
!= 0 && rt
!= 0) {
2881 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 } else if (rs
== 0 && rt
!= 0) {
2883 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2884 } else if (rs
!= 0 && rt
== 0) {
2885 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2887 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2891 if (likely(rs
!= 0 && rt
!= 0)) {
2892 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2893 } else if (rs
== 0 && rt
!= 0) {
2894 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2895 } else if (rs
!= 0 && rt
== 0) {
2896 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2898 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2902 if (likely(rs
!= 0 && rt
!= 0)) {
2903 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2904 } else if (rs
== 0 && rt
!= 0) {
2905 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2906 } else if (rs
!= 0 && rt
== 0) {
2907 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2909 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
2943 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2944 int rd
, int rs
, int rt
)
2949 /* If no destination, treat it as a NOP.
2950 For add & sub, we must generate the overflow exception when needed. */
2954 t0
= tcg_temp_new();
2955 t1
= tcg_temp_new();
2956 gen_load_gpr(t0
, rs
);
2957 gen_load_gpr(t1
, rt
);
2960 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2961 tcg_gen_shl_tl(t0
, t1
, t0
);
2962 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2966 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2969 tcg_gen_ext32u_tl(t1
, t1
);
2970 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2971 tcg_gen_shr_tl(t0
, t1
, t0
);
2972 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2976 TCGv_i32 t2
= tcg_temp_new_i32();
2977 TCGv_i32 t3
= tcg_temp_new_i32();
2979 tcg_gen_trunc_tl_i32(t2
, t0
);
2980 tcg_gen_trunc_tl_i32(t3
, t1
);
2981 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2982 tcg_gen_rotr_i32(t2
, t3
, t2
);
2983 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2984 tcg_temp_free_i32(t2
);
2985 tcg_temp_free_i32(t3
);
2988 #if defined(TARGET_MIPS64)
2990 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2991 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2994 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2995 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2998 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2999 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3002 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3003 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3011 /* Arithmetic on HI/LO registers */
3012 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3014 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3025 #if defined(TARGET_MIPS64)
3027 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3031 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3035 #if defined(TARGET_MIPS64)
3037 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3041 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3046 #if defined(TARGET_MIPS64)
3048 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3052 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3055 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3060 #if defined(TARGET_MIPS64)
3062 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3066 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3069 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
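/* gen_HILO() moves values between a general register and one of the HI/LO
 * accumulators (acc 0 is the architectural HI/LO pair, 1-3 are the extra DSP
 * ASE accumulators); on 64-bit targets the 32-bit forms sign-extend the value
 * with tcg_gen_ext32s_tl. */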
3075 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3078 TCGv t0
= tcg_const_tl(addr
);
3079 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3080 gen_store_gpr(t0
, reg
);
3084 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3090 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3093 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3094 addr
= addr_add(ctx
, pc
, offset
);
3095 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3099 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3100 addr
= addr_add(ctx
, pc
, offset
);
3101 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3103 #if defined(TARGET_MIPS64)
3106 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3107 addr
= addr_add(ctx
, pc
, offset
);
3108 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3112 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3115 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3116 addr
= addr_add(ctx
, pc
, offset
);
3117 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3122 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3123 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3124 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3127 #if defined(TARGET_MIPS64)
3128 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3129 case R6_OPC_LDPC
+ (1 << 16):
3130 case R6_OPC_LDPC
+ (2 << 16):
3131 case R6_OPC_LDPC
+ (3 << 16):
3133 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3134 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3135 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3139 MIPS_INVAL("OPC_PCREL");
3140 generate_exception_end(ctx
, EXCP_RI
);
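/* gen_pcrel() implements the R6 PC-relative address group: the first set of
 * cases sign-extends a word-scaled offset (21 bits after the << 2) and either
 * stores pc + offset into rs or loads from that address; the second set adds
 * a 16-bit immediate shifted into the upper half, in one variant also
 * clearing the low 16 bits of the result, and LDPC uses a doubleword-scaled
 * offset from an 8-byte-aligned pc. */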
3147 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3156 t0
= tcg_temp_new();
3157 t1
= tcg_temp_new();
3159 gen_load_gpr(t0
, rs
);
3160 gen_load_gpr(t1
, rt
);
3165 TCGv t2
= tcg_temp_new();
3166 TCGv t3
= tcg_temp_new();
3167 tcg_gen_ext32s_tl(t0
, t0
);
3168 tcg_gen_ext32s_tl(t1
, t1
);
3169 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3170 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3171 tcg_gen_and_tl(t2
, t2
, t3
);
3172 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3173 tcg_gen_or_tl(t2
, t2
, t3
);
3174 tcg_gen_movi_tl(t3
, 0);
3175 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3176 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3177 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3184 TCGv t2
= tcg_temp_new();
3185 TCGv t3
= tcg_temp_new();
3186 tcg_gen_ext32s_tl(t0
, t0
);
3187 tcg_gen_ext32s_tl(t1
, t1
);
3188 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3189 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3190 tcg_gen_and_tl(t2
, t2
, t3
);
3191 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3192 tcg_gen_or_tl(t2
, t2
, t3
);
3193 tcg_gen_movi_tl(t3
, 0);
3194 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3195 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3196 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3203 TCGv t2
= tcg_const_tl(0);
3204 TCGv t3
= tcg_const_tl(1);
3205 tcg_gen_ext32u_tl(t0
, t0
);
3206 tcg_gen_ext32u_tl(t1
, t1
);
3207 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3208 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3209 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3216 TCGv t2
= tcg_const_tl(0);
3217 TCGv t3
= tcg_const_tl(1);
3218 tcg_gen_ext32u_tl(t0
, t0
);
3219 tcg_gen_ext32u_tl(t1
, t1
);
3220 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3221 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3222 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3229 TCGv_i32 t2
= tcg_temp_new_i32();
3230 TCGv_i32 t3
= tcg_temp_new_i32();
3231 tcg_gen_trunc_tl_i32(t2
, t0
);
3232 tcg_gen_trunc_tl_i32(t3
, t1
);
3233 tcg_gen_mul_i32(t2
, t2
, t3
);
3234 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3235 tcg_temp_free_i32(t2
);
3236 tcg_temp_free_i32(t3
);
3241 TCGv_i32 t2
= tcg_temp_new_i32();
3242 TCGv_i32 t3
= tcg_temp_new_i32();
3243 tcg_gen_trunc_tl_i32(t2
, t0
);
3244 tcg_gen_trunc_tl_i32(t3
, t1
);
3245 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3246 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3247 tcg_temp_free_i32(t2
);
3248 tcg_temp_free_i32(t3
);
3253 TCGv_i32 t2
= tcg_temp_new_i32();
3254 TCGv_i32 t3
= tcg_temp_new_i32();
3255 tcg_gen_trunc_tl_i32(t2
, t0
);
3256 tcg_gen_trunc_tl_i32(t3
, t1
);
3257 tcg_gen_mul_i32(t2
, t2
, t3
);
3258 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3259 tcg_temp_free_i32(t2
);
3260 tcg_temp_free_i32(t3
);
3265 TCGv_i32 t2
= tcg_temp_new_i32();
3266 TCGv_i32 t3
= tcg_temp_new_i32();
3267 tcg_gen_trunc_tl_i32(t2
, t0
);
3268 tcg_gen_trunc_tl_i32(t3
, t1
);
3269 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3270 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3271 tcg_temp_free_i32(t2
);
3272 tcg_temp_free_i32(t3
);
3275 #if defined(TARGET_MIPS64)
3278 TCGv t2
= tcg_temp_new();
3279 TCGv t3
= tcg_temp_new();
3280 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3281 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3282 tcg_gen_and_tl(t2
, t2
, t3
);
3283 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3284 tcg_gen_or_tl(t2
, t2
, t3
);
3285 tcg_gen_movi_tl(t3
, 0);
3286 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3287 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3294 TCGv t2
= tcg_temp_new();
3295 TCGv t3
= tcg_temp_new();
3296 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3297 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3298 tcg_gen_and_tl(t2
, t2
, t3
);
3299 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3300 tcg_gen_or_tl(t2
, t2
, t3
);
3301 tcg_gen_movi_tl(t3
, 0);
3302 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3303 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3310 TCGv t2
= tcg_const_tl(0);
3311 TCGv t3
= tcg_const_tl(1);
3312 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3313 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3320 TCGv t2
= tcg_const_tl(0);
3321 TCGv t3
= tcg_const_tl(1);
3322 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3323 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3329 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3333 TCGv t2
= tcg_temp_new();
3334 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3339 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3343 TCGv t2
= tcg_temp_new();
3344 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3350 MIPS_INVAL("r6 mul/div");
3351 generate_exception_end(ctx
, EXCP_RI
);
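/* The R6 divide/modulo cases above avoid host faults from TCG's division ops:
 * the divisor is forced to a safe value via setcond/movcond when it is zero
 * or when the operation would be INT_MIN / -1, matching the architecture's
 * treatment of those inputs as UNPREDICTABLE. */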
3359 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3360 int acc
, int rs
, int rt
)
3364 t0
= tcg_temp_new();
3365 t1
= tcg_temp_new();
3367 gen_load_gpr(t0
, rs
);
3368 gen_load_gpr(t1
, rt
);
3377 TCGv t2
= tcg_temp_new();
3378 TCGv t3
= tcg_temp_new();
3379 tcg_gen_ext32s_tl(t0
, t0
);
3380 tcg_gen_ext32s_tl(t1
, t1
);
3381 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3382 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3383 tcg_gen_and_tl(t2
, t2
, t3
);
3384 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3385 tcg_gen_or_tl(t2
, t2
, t3
);
3386 tcg_gen_movi_tl(t3
, 0);
3387 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3388 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3389 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3390 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3391 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3398 TCGv t2
= tcg_const_tl(0);
3399 TCGv t3
= tcg_const_tl(1);
3400 tcg_gen_ext32u_tl(t0
, t0
);
3401 tcg_gen_ext32u_tl(t1
, t1
);
3402 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3403 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3404 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3405 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3406 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3413 TCGv_i32 t2
= tcg_temp_new_i32();
3414 TCGv_i32 t3
= tcg_temp_new_i32();
3415 tcg_gen_trunc_tl_i32(t2
, t0
);
3416 tcg_gen_trunc_tl_i32(t3
, t1
);
3417 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3418 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3419 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3420 tcg_temp_free_i32(t2
);
3421 tcg_temp_free_i32(t3
);
3426 TCGv_i32 t2
= tcg_temp_new_i32();
3427 TCGv_i32 t3
= tcg_temp_new_i32();
3428 tcg_gen_trunc_tl_i32(t2
, t0
);
3429 tcg_gen_trunc_tl_i32(t3
, t1
);
3430 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3431 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3432 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3433 tcg_temp_free_i32(t2
);
3434 tcg_temp_free_i32(t3
);
3437 #if defined(TARGET_MIPS64)
3440 TCGv t2
= tcg_temp_new();
3441 TCGv t3
= tcg_temp_new();
3442 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3443 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3444 tcg_gen_and_tl(t2
, t2
, t3
);
3445 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3446 tcg_gen_or_tl(t2
, t2
, t3
);
3447 tcg_gen_movi_tl(t3
, 0);
3448 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3449 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3450 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3457 TCGv t2
= tcg_const_tl(0);
3458 TCGv t3
= tcg_const_tl(1);
3459 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3460 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3461 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3467 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3470 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3475 TCGv_i64 t2
= tcg_temp_new_i64();
3476 TCGv_i64 t3
= tcg_temp_new_i64();
3478 tcg_gen_ext_tl_i64(t2
, t0
);
3479 tcg_gen_ext_tl_i64(t3
, t1
);
3480 tcg_gen_mul_i64(t2
, t2
, t3
);
3481 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3482 tcg_gen_add_i64(t2
, t2
, t3
);
3483 tcg_temp_free_i64(t3
);
3484 gen_move_low32(cpu_LO
[acc
], t2
);
3485 gen_move_high32(cpu_HI
[acc
], t2
);
3486 tcg_temp_free_i64(t2
);
3491 TCGv_i64 t2
= tcg_temp_new_i64();
3492 TCGv_i64 t3
= tcg_temp_new_i64();
3494 tcg_gen_ext32u_tl(t0
, t0
);
3495 tcg_gen_ext32u_tl(t1
, t1
);
3496 tcg_gen_extu_tl_i64(t2
, t0
);
3497 tcg_gen_extu_tl_i64(t3
, t1
);
3498 tcg_gen_mul_i64(t2
, t2
, t3
);
3499 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3500 tcg_gen_add_i64(t2
, t2
, t3
);
3501 tcg_temp_free_i64(t3
);
3502 gen_move_low32(cpu_LO
[acc
], t2
);
3503 gen_move_high32(cpu_HI
[acc
], t2
);
3504 tcg_temp_free_i64(t2
);
3509 TCGv_i64 t2
= tcg_temp_new_i64();
3510 TCGv_i64 t3
= tcg_temp_new_i64();
3512 tcg_gen_ext_tl_i64(t2
, t0
);
3513 tcg_gen_ext_tl_i64(t3
, t1
);
3514 tcg_gen_mul_i64(t2
, t2
, t3
);
3515 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3516 tcg_gen_sub_i64(t2
, t3
, t2
);
3517 tcg_temp_free_i64(t3
);
3518 gen_move_low32(cpu_LO
[acc
], t2
);
3519 gen_move_high32(cpu_HI
[acc
], t2
);
3520 tcg_temp_free_i64(t2
);
3525 TCGv_i64 t2
= tcg_temp_new_i64();
3526 TCGv_i64 t3
= tcg_temp_new_i64();
3528 tcg_gen_ext32u_tl(t0
, t0
);
3529 tcg_gen_ext32u_tl(t1
, t1
);
3530 tcg_gen_extu_tl_i64(t2
, t0
);
3531 tcg_gen_extu_tl_i64(t3
, t1
);
3532 tcg_gen_mul_i64(t2
, t2
, t3
);
3533 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3534 tcg_gen_sub_i64(t2
, t3
, t2
);
3535 tcg_temp_free_i64(t3
);
3536 gen_move_low32(cpu_LO
[acc
], t2
);
3537 gen_move_high32(cpu_HI
[acc
], t2
);
3538 tcg_temp_free_i64(t2
);
3542 MIPS_INVAL("mul/div");
3543 generate_exception_end(ctx
, EXCP_RI
);
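/* The MADD/MADDU and MSUB/MSUBU cases above widen both operands to 64 bits,
 * multiply, and add to or subtract from the current {HI,LO} pair assembled
 * with tcg_gen_concat_tl_i64(), then split the result back with
 * gen_move_low32()/gen_move_high32(). */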
3551 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3552 int rd
, int rs
, int rt
)
3554 TCGv t0
= tcg_temp_new();
3555 TCGv t1
= tcg_temp_new();
3557 gen_load_gpr(t0
, rs
);
3558 gen_load_gpr(t1
, rt
);
3561 case OPC_VR54XX_MULS
:
3562 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3564 case OPC_VR54XX_MULSU
:
3565 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MACC
:
3568 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MACCU
:
3571 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MSAC
:
3574 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MSACU
:
3577 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MULHI
:
3580 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULHIU
:
3583 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULSHI
:
3586 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MULSHIU
:
3589 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MACCHI
:
3592 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MACCHIU
:
3595 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MSACHI
:
3598 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3600 case OPC_VR54XX_MSACHIU
:
3601 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3604 MIPS_INVAL("mul vr54xx");
3605 generate_exception_end(ctx
, EXCP_RI
);
3608 gen_store_gpr(t0
, rd
);
static void gen_cl (DisasContext *ctx, uint32_t opc,
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
        gen_helper_clo(cpu_gpr[rd], t0);
        gen_helper_clz(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dclo(cpu_gpr[rd], t0);
        gen_helper_dclz(cpu_gpr[rd], t0);
3649 /* Godson integer instructions */
3650 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3651 int rd
, int rs
, int rt
)
3663 case OPC_MULTU_G_2E
:
3664 case OPC_MULTU_G_2F
:
3665 #if defined(TARGET_MIPS64)
3666 case OPC_DMULT_G_2E
:
3667 case OPC_DMULT_G_2F
:
3668 case OPC_DMULTU_G_2E
:
3669 case OPC_DMULTU_G_2F
:
3671 t0
= tcg_temp_new();
3672 t1
= tcg_temp_new();
3675 t0
= tcg_temp_local_new();
3676 t1
= tcg_temp_local_new();
3680 gen_load_gpr(t0
, rs
);
3681 gen_load_gpr(t1
, rt
);
3686 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3687 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3689 case OPC_MULTU_G_2E
:
3690 case OPC_MULTU_G_2F
:
3691 tcg_gen_ext32u_tl(t0
, t0
);
3692 tcg_gen_ext32u_tl(t1
, t1
);
3693 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3694 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3699 TCGLabel
*l1
= gen_new_label();
3700 TCGLabel
*l2
= gen_new_label();
3701 TCGLabel
*l3
= gen_new_label();
3702 tcg_gen_ext32s_tl(t0
, t0
);
3703 tcg_gen_ext32s_tl(t1
, t1
);
3704 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3705 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3708 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3709 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3710 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3713 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3714 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3721 TCGLabel
*l1
= gen_new_label();
3722 TCGLabel
*l2
= gen_new_label();
3723 tcg_gen_ext32u_tl(t0
, t0
);
3724 tcg_gen_ext32u_tl(t1
, t1
);
3725 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3726 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3729 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3730 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3737 TCGLabel
*l1
= gen_new_label();
3738 TCGLabel
*l2
= gen_new_label();
3739 TCGLabel
*l3
= gen_new_label();
3740 tcg_gen_ext32u_tl(t0
, t0
);
3741 tcg_gen_ext32u_tl(t1
, t1
);
3742 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3743 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3744 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3746 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3749 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3750 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3757 TCGLabel
*l1
= gen_new_label();
3758 TCGLabel
*l2
= gen_new_label();
3759 tcg_gen_ext32u_tl(t0
, t0
);
3760 tcg_gen_ext32u_tl(t1
, t1
);
3761 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3762 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3765 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3766 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3770 #if defined(TARGET_MIPS64)
3771 case OPC_DMULT_G_2E
:
3772 case OPC_DMULT_G_2F
:
3773 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3775 case OPC_DMULTU_G_2E
:
3776 case OPC_DMULTU_G_2F
:
3777 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3782 TCGLabel
*l1
= gen_new_label();
3783 TCGLabel
*l2
= gen_new_label();
3784 TCGLabel
*l3
= gen_new_label();
3785 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3786 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3789 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3790 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3791 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3794 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3798 case OPC_DDIVU_G_2E
:
3799 case OPC_DDIVU_G_2F
:
3801 TCGLabel
*l1
= gen_new_label();
3802 TCGLabel
*l2
= gen_new_label();
3803 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3804 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3807 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3814 TCGLabel
*l1
= gen_new_label();
3815 TCGLabel
*l2
= gen_new_label();
3816 TCGLabel
*l3
= gen_new_label();
3817 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3818 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3819 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3824 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3828 case OPC_DMODU_G_2E
:
3829 case OPC_DMODU_G_2F
:
3831 TCGLabel
*l1
= gen_new_label();
3832 TCGLabel
*l2
= gen_new_label();
3833 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3834 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3837 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
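/* The Loongson (Godson) divide and modulo variants above do not trap on a
 * zero divisor: the brcondi guards write a fixed result (0, or the dividend
 * for the signed-division overflow case INT_MIN / -1) and skip the TCG
 * division entirely in those cases. */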
3848 /* Loongson multimedia instructions */
3849 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3851 uint32_t opc
, shift_max
;
3854 opc
= MASK_LMI(ctx
->opcode
);
3860 t0
= tcg_temp_local_new_i64();
3861 t1
= tcg_temp_local_new_i64();
3864 t0
= tcg_temp_new_i64();
3865 t1
= tcg_temp_new_i64();
3869 gen_load_fpr64(ctx
, t0
, rs
);
3870 gen_load_fpr64(ctx
, t1
, rt
);
3872 #define LMI_HELPER(UP, LO) \
3873 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3874 #define LMI_HELPER_1(UP, LO) \
3875 case OPC_##UP: gen_helper_##LO(t0, t0); break
3876 #define LMI_DIRECT(UP, LO, OP) \
3877 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
        LMI_HELPER(PADDSH, paddsh);
        LMI_HELPER(PADDUSH, paddush);
        LMI_HELPER(PADDH, paddh);
        LMI_HELPER(PADDW, paddw);
        LMI_HELPER(PADDSB, paddsb);
        LMI_HELPER(PADDUSB, paddusb);
        LMI_HELPER(PADDB, paddb);

        LMI_HELPER(PSUBSH, psubsh);
        LMI_HELPER(PSUBUSH, psubush);
        LMI_HELPER(PSUBH, psubh);
        LMI_HELPER(PSUBW, psubw);
        LMI_HELPER(PSUBSB, psubsb);
        LMI_HELPER(PSUBUSB, psubusb);
        LMI_HELPER(PSUBB, psubb);

        LMI_HELPER(PSHUFH, pshufh);
        LMI_HELPER(PACKSSWH, packsswh);
        LMI_HELPER(PACKSSHB, packsshb);
        LMI_HELPER(PACKUSHB, packushb);

        LMI_HELPER(PUNPCKLHW, punpcklhw);
        LMI_HELPER(PUNPCKHHW, punpckhhw);
        LMI_HELPER(PUNPCKLBH, punpcklbh);
        LMI_HELPER(PUNPCKHBH, punpckhbh);
        LMI_HELPER(PUNPCKLWD, punpcklwd);
        LMI_HELPER(PUNPCKHWD, punpckhwd);

        LMI_HELPER(PAVGH, pavgh);
        LMI_HELPER(PAVGB, pavgb);
        LMI_HELPER(PMAXSH, pmaxsh);
        LMI_HELPER(PMINSH, pminsh);
        LMI_HELPER(PMAXUB, pmaxub);
        LMI_HELPER(PMINUB, pminub);

        LMI_HELPER(PCMPEQW, pcmpeqw);
        LMI_HELPER(PCMPGTW, pcmpgtw);
        LMI_HELPER(PCMPEQH, pcmpeqh);
        LMI_HELPER(PCMPGTH, pcmpgth);
        LMI_HELPER(PCMPEQB, pcmpeqb);
        LMI_HELPER(PCMPGTB, pcmpgtb);

        LMI_HELPER(PSLLW, psllw);
        LMI_HELPER(PSLLH, psllh);
        LMI_HELPER(PSRLW, psrlw);
        LMI_HELPER(PSRLH, psrlh);
        LMI_HELPER(PSRAW, psraw);
        LMI_HELPER(PSRAH, psrah);

        LMI_HELPER(PMULLH, pmullh);
        LMI_HELPER(PMULHH, pmulhh);
        LMI_HELPER(PMULHUH, pmulhuh);
        LMI_HELPER(PMADDHW, pmaddhw);

        LMI_HELPER(PASUBUB, pasubub);
        LMI_HELPER_1(BIADD, biadd);
        LMI_HELPER_1(PMOVMSKB, pmovmskb);

        LMI_DIRECT(PADDD, paddd, add);
        LMI_DIRECT(PSUBD, psubd, sub);
        LMI_DIRECT(XOR_CP2, xor, xor);
        LMI_DIRECT(NOR_CP2, nor, nor);
        LMI_DIRECT(AND_CP2, and, and);
        LMI_DIRECT(PANDN, pandn, andc);
        LMI_DIRECT(OR, or, or);
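        /* The tcg_gen_deposit_i64() calls that follow insert the low
           halfword of t1 into one of the four 16-bit lanes of t0; in the
           upstream source these correspond to the PINSRH_0..PINSRH_3 cases,
           and the andi/shli/shr/ext16u sequence after them is PEXTRH, which
           selects one of t0's halfwords by index. */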
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

            tcg_gen_shl_i64(t0, t0, t1);

            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same.  */
            tcg_gen_sar_i64(t0, t0, t1);

            /* We want to shift in zeros for SRL; zero-extend first.  */
            tcg_gen_ext32u_i64(t0, t0);
            tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
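        /* Note: the setcond/neg/and sequence above builds an all-ones or
           all-zeros mask out of the "count < shift_max" predicate (0 or 1,
           negated to 0 or -1) and ands it into the result, so shift counts
           of shift_max or more yield zero without needing a branch. */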
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
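        /* Note: the xor/andc sequence above computes the usual signed
           overflow predicate for the addition: (a ^ sum) & ~(a ^ b) has its
           sign bit set exactly when both operands share a sign that the sum
           does not, so the TCG_COND_GE branch skips the EXCP_OVERFLOW
           exception whenever no overflow occurred. */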
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

    /* ??? Document is unclear: Set FCC[CC].  Does that mean the
       FD field is the CC field? */

        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    TranslationBlock *tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_exit_tb((uintptr_t)tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
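/* Note on gen_goto_tb(): when the destination stays on the same guest page
   as the current TB and single-stepping is off, exiting with
   (uintptr_t)tb + n lets the TCG runtime patch a direct jump to the next TB
   (block chaining); otherwise execution returns to the main loop, and under
   single-step a debug exception is raised instead. */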
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
                                int insn_bytes, int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
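/* Note: when the branch links (blink != 0), the value stored above is the
   address of the instruction following the delay slot, with bit 0 set when
   translating in MIPS16/microMIPS mode so that a later jump through the link
   register re-enters the compressed ISA. */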
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
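/* Note: the paths above cover the two halves of the SPECIAL3 bitfield ops:
   the shri+andi pattern extracts an (msb+1)-bit field of rs starting at lsb
   (EXT-style), while the gen_load_gpr+deposit pattern inserts the low
   msb-lsb+1 bits of rs into rt at bit positions lsb..msb (INS-style); the
   32-bit forms additionally sign-extend the result before write-back. */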
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bshfl");
        generate_exception_end(ctx, EXCP_RI);
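/* Note: in the upstream source these are the WSBH/DSBH/DSHD paths: WSBH and
   DSBH swap the two bytes inside every halfword with the classic
   shift-and-mask-and-or idiom, and DSHD then swaps the halfwords inside each
   word and finally the two words via the 16-bit and 32-bit shift/or steps,
   giving a full reversal of the halfword order. */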
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
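/* Note: LSA/DLSA compute rd = (rs << (sa + 1)) + rt, where imm2 presumably
   carries the 2-bit sa field, so the effective scale is 2, 4, 8 or 16; the
   32-bit LSA form sign-extends the sum as required by MIPS Release 6. */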
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
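/* Note: for ALIGN the 64-bit temp holds rt:rs (rt in the high half), and the
   right shift by 8 * (4 - bp) leaves (rt << (8 * bp)) | (rs >> (8 * (4 - bp)))
   in the low 32 bits; DALIGN gets the same effect over 64 bits with the
   shli/shri/or pair instead of a concatenation. */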
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
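/* Note on the helpers above: MFHC0/MTHC0 access the upper half of 64-bit CP0
   registers from a 32-bit guest, so the generic load64/store64 helpers
   concatenate or shift at bit 32, while the EntryLo variants split at bit 30
   on MIPS64 builds rather than bit 32, matching how QEMU keeps the RI/XI and
   extended-PFN bits in its internal EntryLo layout. */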
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {
        goto mfhc0_read_zero;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto mfhc0_read_zero;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto mfhc0_read_zero;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            goto mfhc0_read_zero;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto mfhc0_read_zero;
        goto mfhc0_read_zero;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);

static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);

#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
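/* Note: CP0_CHECK() guards each register/sel pair against a missing ASE or
   ISA feature and falls through to the cp0_unimplemented label, where reads
   use gen_mfc0_unimplemented(): Release 6 defines such reads as zero, while
   earlier ISAs return all ones here. */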
4927 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4929 const char *rn
= "invalid";
4932 check_insn(ctx
, ISA_MIPS32
);
4938 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4942 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4943 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4947 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4948 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4952 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4953 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4962 goto cp0_unimplemented
;
4968 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4969 gen_helper_mfc0_random(arg
, cpu_env
);
4973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4974 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4979 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4984 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4988 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4989 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4993 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4994 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4998 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4999 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5000 rn
= "VPEScheFBack";
5003 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5004 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5008 goto cp0_unimplemented
;
5015 TCGv_i64 tmp
= tcg_temp_new_i64();
5016 tcg_gen_ld_i64(tmp
, cpu_env
,
5017 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5018 #if defined(TARGET_MIPS64)
5020 /* Move RI/XI fields to bits 31:30 */
5021 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5022 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5025 gen_move_low32(arg
, tmp
);
5026 tcg_temp_free_i64(tmp
);
5031 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5032 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5036 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5037 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5042 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5046 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5047 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5051 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5052 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5056 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5057 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5061 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5062 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5066 goto cp0_unimplemented
;
5073 TCGv_i64 tmp
= tcg_temp_new_i64();
5074 tcg_gen_ld_i64(tmp
, cpu_env
,
5075 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5076 #if defined(TARGET_MIPS64)
5078 /* Move RI/XI fields to bits 31:30 */
5079 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5080 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5083 gen_move_low32(arg
, tmp
);
5084 tcg_temp_free_i64(tmp
);
5090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5091 rn
= "GlobalNumber";
5094 goto cp0_unimplemented
;
5100 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5101 tcg_gen_ext32s_tl(arg
, arg
);
5105 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5106 rn
= "ContextConfig";
5107 goto cp0_unimplemented
;
5110 CP0_CHECK(ctx
->ulri
);
5111 tcg_gen_ld32s_tl(arg
, cpu_env
,
5112 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5116 goto cp0_unimplemented
;
5122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5126 check_insn(ctx
, ISA_MIPS32R2
);
5127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5131 goto cp0_unimplemented
;
5137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5141 check_insn(ctx
, ISA_MIPS32R2
);
5142 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5146 check_insn(ctx
, ISA_MIPS32R2
);
5147 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5151 check_insn(ctx
, ISA_MIPS32R2
);
5152 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5156 check_insn(ctx
, ISA_MIPS32R2
);
5157 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5161 check_insn(ctx
, ISA_MIPS32R2
);
5162 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5166 goto cp0_unimplemented
;
5172 check_insn(ctx
, ISA_MIPS32R2
);
5173 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5177 goto cp0_unimplemented
;
5183 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5184 tcg_gen_ext32s_tl(arg
, arg
);
5189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5198 goto cp0_unimplemented
;
5204 /* Mark as an IO operation because we read the time. */
5205 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5208 gen_helper_mfc0_count(arg
, cpu_env
);
5209 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5212 /* Break the TB to be able to take timer interrupts immediately
5213 after reading count. */
5214 ctx
->bstate
= BS_STOP
;
5217 /* 6,7 are implementation dependent */
5219 goto cp0_unimplemented
;
5225 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5226 tcg_gen_ext32s_tl(arg
, arg
);
5230 goto cp0_unimplemented
;
5236 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5239 /* 6,7 are implementation dependent */
5241 goto cp0_unimplemented
;
5247 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5251 check_insn(ctx
, ISA_MIPS32R2
);
5252 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5256 check_insn(ctx
, ISA_MIPS32R2
);
5257 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5261 check_insn(ctx
, ISA_MIPS32R2
);
5262 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5266 goto cp0_unimplemented
;
5272 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5276 goto cp0_unimplemented
;
5282 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5283 tcg_gen_ext32s_tl(arg
, arg
);
5287 goto cp0_unimplemented
;
5293 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5297 check_insn(ctx
, ISA_MIPS32R2
);
5298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5302 goto cp0_unimplemented
;
5308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5328 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5331 /* 6,7 are implementation dependent */
5333 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5337 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5341 goto cp0_unimplemented
;
5347 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5351 goto cp0_unimplemented
;
5357 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5361 goto cp0_unimplemented
;
5367 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5371 goto cp0_unimplemented
;
5377 #if defined(TARGET_MIPS64)
5378 check_insn(ctx
, ISA_MIPS3
);
5379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5380 tcg_gen_ext32s_tl(arg
, arg
);
5385 goto cp0_unimplemented
;
5389 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5390 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5393 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5397 goto cp0_unimplemented
;
5401 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
5407 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5411 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5412 rn
= "TraceControl";
5415 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5416 rn
= "TraceControl2";
5419 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5420 rn
= "UserTraceData";
5423 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5427 goto cp0_unimplemented
;
5434 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5435 tcg_gen_ext32s_tl(arg
, arg
);
5439 goto cp0_unimplemented
;
5445 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5446 rn
= "Performance0";
5449 // gen_helper_mfc0_performance1(arg);
5450 rn
= "Performance1";
5453 // gen_helper_mfc0_performance2(arg);
5454 rn
= "Performance2";
5457 // gen_helper_mfc0_performance3(arg);
5458 rn
= "Performance3";
5461 // gen_helper_mfc0_performance4(arg);
5462 rn
= "Performance4";
5465 // gen_helper_mfc0_performance5(arg);
5466 rn
= "Performance5";
5469 // gen_helper_mfc0_performance6(arg);
5470 rn
= "Performance6";
5473 // gen_helper_mfc0_performance7(arg);
5474 rn
= "Performance7";
5477 goto cp0_unimplemented
;
5481 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5487 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5491 goto cp0_unimplemented
;
5501 TCGv_i64 tmp
= tcg_temp_new_i64();
5502 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5503 gen_move_low32(arg
, tmp
);
5504 tcg_temp_free_i64(tmp
);
5512 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5516 goto cp0_unimplemented
;
5525 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5536 goto cp0_unimplemented
;
5542 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5543 tcg_gen_ext32s_tl(arg
, arg
);
5547 goto cp0_unimplemented
;
5554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5558 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5559 tcg_gen_ld_tl(arg
, cpu_env
,
5560 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5561 tcg_gen_ext32s_tl(arg
, arg
);
5565 goto cp0_unimplemented
;
5569 goto cp0_unimplemented
;
5571 (void)rn
; /* avoid a compiler warning */
5572 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5576 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5577 gen_mfc0_unimplemented(ctx
, arg
);
5580 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5582 const char *rn
= "invalid";
5585 check_insn(ctx
, ISA_MIPS32
);
5587 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5595 gen_helper_mtc0_index(cpu_env
, arg
);
5599 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5600 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5604 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5609 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5619 goto cp0_unimplemented
;
5629 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5630 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5634 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5635 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5639 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5640 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5644 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5645 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5649 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5650 tcg_gen_st_tl(arg
, cpu_env
,
5651 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5655 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5656 tcg_gen_st_tl(arg
, cpu_env
,
5657 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5658 rn
= "VPEScheFBack";
5661 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5662 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5666 goto cp0_unimplemented
;
5672 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5677 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5681 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5682 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5687 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5691 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5692 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5696 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5697 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5701 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5702 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5706 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5707 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5711 goto cp0_unimplemented
;
5717 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5723 rn
= "GlobalNumber";
5726 goto cp0_unimplemented
;
5732 gen_helper_mtc0_context(cpu_env
, arg
);
5736 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5737 rn
= "ContextConfig";
5738 goto cp0_unimplemented
;
5741 CP0_CHECK(ctx
->ulri
);
5742 tcg_gen_st_tl(arg
, cpu_env
,
5743 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5747 goto cp0_unimplemented
;
5753 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5757 check_insn(ctx
, ISA_MIPS32R2
);
5758 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5760 ctx
->bstate
= BS_STOP
;
5763 goto cp0_unimplemented
;
5769 gen_helper_mtc0_wired(cpu_env
, arg
);
5773 check_insn(ctx
, ISA_MIPS32R2
);
5774 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5778 check_insn(ctx
, ISA_MIPS32R2
);
5779 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5783 check_insn(ctx
, ISA_MIPS32R2
);
5784 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5788 check_insn(ctx
, ISA_MIPS32R2
);
5789 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5793 check_insn(ctx
, ISA_MIPS32R2
);
5794 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5798 goto cp0_unimplemented
;
5804 check_insn(ctx
, ISA_MIPS32R2
);
5805 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5806 ctx
->bstate
= BS_STOP
;
5810 goto cp0_unimplemented
;
5828 goto cp0_unimplemented
;
5834 gen_helper_mtc0_count(cpu_env
, arg
);
5837 /* 6,7 are implementation dependent */
5839 goto cp0_unimplemented
;
5845 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5849 goto cp0_unimplemented
;
5855 gen_helper_mtc0_compare(cpu_env
, arg
);
5858 /* 6,7 are implementation dependent */
5860 goto cp0_unimplemented
;
5866 save_cpu_state(ctx
, 1);
5867 gen_helper_mtc0_status(cpu_env
, arg
);
5868 /* BS_STOP isn't good enough here, hflags may have changed. */
5869 gen_save_pc(ctx
->pc
+ 4);
5870 ctx
->bstate
= BS_EXCP
;
5874 check_insn(ctx
, ISA_MIPS32R2
);
5875 gen_helper_mtc0_intctl(cpu_env
, arg
);
5876 /* Stop translation as we may have switched the execution mode */
5877 ctx
->bstate
= BS_STOP
;
5881 check_insn(ctx
, ISA_MIPS32R2
);
5882 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5883 /* Stop translation as we may have switched the execution mode */
5884 ctx
->bstate
= BS_STOP
;
5888 check_insn(ctx
, ISA_MIPS32R2
);
5889 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5890 /* Stop translation as we may have switched the execution mode */
5891 ctx
->bstate
= BS_STOP
;
5895 goto cp0_unimplemented
;
5901 save_cpu_state(ctx
, 1);
5902 gen_helper_mtc0_cause(cpu_env
, arg
);
5906 goto cp0_unimplemented
;
5912 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5916 goto cp0_unimplemented
;
5926 check_insn(ctx
, ISA_MIPS32R2
);
5927 gen_helper_mtc0_ebase(cpu_env
, arg
);
5931 goto cp0_unimplemented
;
5937 gen_helper_mtc0_config0(cpu_env
, arg
);
5939 /* Stop translation as we may have switched the execution mode */
5940 ctx
->bstate
= BS_STOP
;
5943 /* ignored, read only */
5947 gen_helper_mtc0_config2(cpu_env
, arg
);
5949 /* Stop translation as we may have switched the execution mode */
5950 ctx
->bstate
= BS_STOP
;
5953 gen_helper_mtc0_config3(cpu_env
, arg
);
5955 /* Stop translation as we may have switched the execution mode */
5956 ctx
->bstate
= BS_STOP
;
5959 gen_helper_mtc0_config4(cpu_env
, arg
);
5961 ctx
->bstate
= BS_STOP
;
5964 gen_helper_mtc0_config5(cpu_env
, arg
);
5966 /* Stop translation as we may have switched the execution mode */
5967 ctx
->bstate
= BS_STOP
;
5969 /* 6,7 are implementation dependent */
5979 rn
= "Invalid config selector";
5980 goto cp0_unimplemented
;
5986 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5990 goto cp0_unimplemented
;
5996 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6000 goto cp0_unimplemented
;
6006 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6010 goto cp0_unimplemented
;
6016 #if defined(TARGET_MIPS64)
6017 check_insn(ctx
, ISA_MIPS3
);
6018 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6023 goto cp0_unimplemented
;
6027 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6028 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6031 gen_helper_mtc0_framemask(cpu_env
, arg
);
6035 goto cp0_unimplemented
;
6040 rn
= "Diagnostic"; /* implementation dependent */
6045 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6046 /* BS_STOP isn't good enough here, hflags may have changed. */
6047 gen_save_pc(ctx
->pc
+ 4);
6048 ctx
->bstate
= BS_EXCP
;
6052 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6053 rn
= "TraceControl";
6054 /* Stop translation as we may have switched the execution mode */
6055 ctx
->bstate
= BS_STOP
;
6058 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6059 rn
= "TraceControl2";
6060 /* Stop translation as we may have switched the execution mode */
6061 ctx
->bstate
= BS_STOP
;
6064 /* Stop translation as we may have switched the execution mode */
6065 ctx
->bstate
= BS_STOP
;
6066 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6067 rn
= "UserTraceData";
6068 /* Stop translation as we may have switched the execution mode */
6069 ctx
->bstate
= BS_STOP
;
6072 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6073 /* Stop translation as we may have switched the execution mode */
6074 ctx
->bstate
= BS_STOP
;
6078 goto cp0_unimplemented
;
6085 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6089 goto cp0_unimplemented
;
6095 gen_helper_mtc0_performance0(cpu_env
, arg
);
6096 rn
= "Performance0";
6099 // gen_helper_mtc0_performance1(arg);
6100 rn
= "Performance1";
6103 // gen_helper_mtc0_performance2(arg);
6104 rn
= "Performance2";
6107 // gen_helper_mtc0_performance3(arg);
6108 rn
= "Performance3";
6111 // gen_helper_mtc0_performance4(arg);
6112 rn
= "Performance4";
6115 // gen_helper_mtc0_performance5(arg);
6116 rn
= "Performance5";
6119 // gen_helper_mtc0_performance6(arg);
6120 rn
= "Performance6";
6123 // gen_helper_mtc0_performance7(arg);
6124 rn
= "Performance7";
6127 goto cp0_unimplemented
;
6141 goto cp0_unimplemented
;
6150 gen_helper_mtc0_taglo(cpu_env
, arg
);
6157 gen_helper_mtc0_datalo(cpu_env
, arg
);
6161 goto cp0_unimplemented
;
6170 gen_helper_mtc0_taghi(cpu_env
, arg
);
6177 gen_helper_mtc0_datahi(cpu_env
, arg
);
6182 goto cp0_unimplemented
;
6188 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6192 goto cp0_unimplemented
;
6199 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6203 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6204 tcg_gen_st_tl(arg
, cpu_env
,
6205 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6209 goto cp0_unimplemented
;
6211 /* Stop translation as we may have switched the execution mode */
6212 ctx
->bstate
= BS_STOP
;
6215 goto cp0_unimplemented
;
6217 (void)rn
; /* avoid a compiler warning */
6218 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6219 /* For simplicity assume that all writes can cause interrupts. */
6220 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6222 ctx
->bstate
= BS_STOP
;
6227 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6230 #if defined(TARGET_MIPS64)
6231 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6233 const char *rn
= "invalid";
6236 check_insn(ctx
, ISA_MIPS64
);
6242 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6246 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6247 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6251 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6252 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6256 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6257 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6262 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6266 goto cp0_unimplemented
;
6272 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6273 gen_helper_mfc0_random(arg
, cpu_env
);
6277 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6278 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6282 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6287 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6288 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6293 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6298 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6303 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6304 rn
= "VPEScheFBack";
6307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6312 goto cp0_unimplemented
;
6318 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6322 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6323 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6327 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6328 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6332 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6333 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6337 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6338 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6342 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6343 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6347 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6348 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6352 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6353 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6357 goto cp0_unimplemented
;
6363 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6369 rn
= "GlobalNumber";
6372 goto cp0_unimplemented
;
6378 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6382 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6383 rn
= "ContextConfig";
6384 goto cp0_unimplemented
;
6387 CP0_CHECK(ctx
->ulri
);
6388 tcg_gen_ld_tl(arg
, cpu_env
,
6389 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6393 goto cp0_unimplemented
;
6399 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6403 check_insn(ctx
, ISA_MIPS32R2
);
6404 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6408 goto cp0_unimplemented
;
6414 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6418 check_insn(ctx
, ISA_MIPS32R2
);
6419 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6423 check_insn(ctx
, ISA_MIPS32R2
);
6424 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6428 check_insn(ctx
, ISA_MIPS32R2
);
6429 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6433 check_insn(ctx
, ISA_MIPS32R2
);
6434 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6438 check_insn(ctx
, ISA_MIPS32R2
);
6439 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6443 goto cp0_unimplemented
;
6449 check_insn(ctx
, ISA_MIPS32R2
);
6450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6454 goto cp0_unimplemented
;
6460 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6470 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6474 goto cp0_unimplemented
;
6480 /* Mark as an IO operation because we read the time. */
6481 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6484 gen_helper_mfc0_count(arg
, cpu_env
);
6485 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6488 /* Break the TB to be able to take timer interrupts immediately
6489 after reading count. */
6490 ctx
->bstate
= BS_STOP
;
6493 /* 6,7 are implementation dependent */
6495 goto cp0_unimplemented
;
6501 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6505 goto cp0_unimplemented
;
6511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6514 /* 6,7 are implementation dependent */
6516 goto cp0_unimplemented
;
6522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6526 check_insn(ctx
, ISA_MIPS32R2
);
6527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6531 check_insn(ctx
, ISA_MIPS32R2
);
6532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6536 check_insn(ctx
, ISA_MIPS32R2
);
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6541 goto cp0_unimplemented
;
6547 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6551 goto cp0_unimplemented
;
6557 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6561 goto cp0_unimplemented
;
6567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6571 check_insn(ctx
, ISA_MIPS32R2
);
6572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6576 goto cp0_unimplemented
;
6582 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6586 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6590 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6594 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6602 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6605 /* 6,7 are implementation dependent */
6607 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6611 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6615 goto cp0_unimplemented
;
6621 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6625 goto cp0_unimplemented
;
6631 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6635 goto cp0_unimplemented
;
6641 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6645 goto cp0_unimplemented
;
6651 check_insn(ctx
, ISA_MIPS3
);
6652 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6656 goto cp0_unimplemented
;
6660 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6661 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6668 goto cp0_unimplemented
;
6672 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
        rn = "Diagnostic"; /* implementation dependent */
6678 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6682 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6683 rn
= "TraceControl";
6686 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6687 rn
= "TraceControl2";
6690 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6691 rn
= "UserTraceData";
6694 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6698 goto cp0_unimplemented
;
6705 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6709 goto cp0_unimplemented
;
6715 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6716 rn
= "Performance0";
6719 // gen_helper_dmfc0_performance1(arg);
6720 rn
= "Performance1";
6723 // gen_helper_dmfc0_performance2(arg);
6724 rn
= "Performance2";
6727 // gen_helper_dmfc0_performance3(arg);
6728 rn
= "Performance3";
6731 // gen_helper_dmfc0_performance4(arg);
6732 rn
= "Performance4";
6735 // gen_helper_dmfc0_performance5(arg);
6736 rn
= "Performance5";
6739 // gen_helper_dmfc0_performance6(arg);
6740 rn
= "Performance6";
6743 // gen_helper_dmfc0_performance7(arg);
6744 rn
= "Performance7";
6747 goto cp0_unimplemented
;
6751 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6758 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6762 goto cp0_unimplemented
;
6771 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6782 goto cp0_unimplemented
;
6791 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6802 goto cp0_unimplemented
;
6808 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6812 goto cp0_unimplemented
;
6819 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6823 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6824 tcg_gen_ld_tl(arg
, cpu_env
,
6825 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6829 goto cp0_unimplemented
;
6833 goto cp0_unimplemented
;
6835 (void)rn
; /* avoid a compiler warning */
6836 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6840 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6841 gen_mfc0_unimplemented(ctx
, arg
);
6844 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6846 const char *rn
= "invalid";
6849 check_insn(ctx
, ISA_MIPS64
);
6851 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6859 gen_helper_mtc0_index(cpu_env
, arg
);
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6883 goto cp0_unimplemented
;
6893 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6894 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6898 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6899 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6904 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6909 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6914 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6918 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6919 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6920 rn
= "VPEScheFBack";
6923 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6924 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6928 goto cp0_unimplemented
;
6934 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6938 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6939 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6944 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6948 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6949 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6954 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6958 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6959 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6963 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6964 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6968 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6969 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6973 goto cp0_unimplemented
;
6979 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6985 rn
= "GlobalNumber";
6988 goto cp0_unimplemented
;
6994 gen_helper_mtc0_context(cpu_env
, arg
);
6998 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6999 rn
= "ContextConfig";
7000 goto cp0_unimplemented
;
7003 CP0_CHECK(ctx
->ulri
);
7004 tcg_gen_st_tl(arg
, cpu_env
,
7005 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7009 goto cp0_unimplemented
;
7015 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7019 check_insn(ctx
, ISA_MIPS32R2
);
7020 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7024 goto cp0_unimplemented
;
7030 gen_helper_mtc0_wired(cpu_env
, arg
);
7034 check_insn(ctx
, ISA_MIPS32R2
);
7035 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7039 check_insn(ctx
, ISA_MIPS32R2
);
7040 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7044 check_insn(ctx
, ISA_MIPS32R2
);
7045 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7049 check_insn(ctx
, ISA_MIPS32R2
);
7050 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7054 check_insn(ctx
, ISA_MIPS32R2
);
7055 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7059 goto cp0_unimplemented
;
7065 check_insn(ctx
, ISA_MIPS32R2
);
7066 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7067 ctx
->bstate
= BS_STOP
;
7071 goto cp0_unimplemented
;
7089 goto cp0_unimplemented
;
7095 gen_helper_mtc0_count(cpu_env
, arg
);
7098 /* 6,7 are implementation dependent */
7100 goto cp0_unimplemented
;
7102 /* Stop translation as we may have switched the execution mode */
7103 ctx
->bstate
= BS_STOP
;
7108 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7112 goto cp0_unimplemented
;
7118 gen_helper_mtc0_compare(cpu_env
, arg
);
7121 /* 6,7 are implementation dependent */
7123 goto cp0_unimplemented
;
7125 /* Stop translation as we may have switched the execution mode */
7126 ctx
->bstate
= BS_STOP
;
7131 save_cpu_state(ctx
, 1);
7132 gen_helper_mtc0_status(cpu_env
, arg
);
7133 /* BS_STOP isn't good enough here, hflags may have changed. */
7134 gen_save_pc(ctx
->pc
+ 4);
7135 ctx
->bstate
= BS_EXCP
;
7139 check_insn(ctx
, ISA_MIPS32R2
);
7140 gen_helper_mtc0_intctl(cpu_env
, arg
);
7141 /* Stop translation as we may have switched the execution mode */
7142 ctx
->bstate
= BS_STOP
;
7146 check_insn(ctx
, ISA_MIPS32R2
);
7147 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7148 /* Stop translation as we may have switched the execution mode */
7149 ctx
->bstate
= BS_STOP
;
7153 check_insn(ctx
, ISA_MIPS32R2
);
7154 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7155 /* Stop translation as we may have switched the execution mode */
7156 ctx
->bstate
= BS_STOP
;
7160 goto cp0_unimplemented
;
7166 save_cpu_state(ctx
, 1);
            /* Mark as an IO operation because we may trigger a software
               interrupt.  */
7169 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7172 gen_helper_mtc0_cause(cpu_env
, arg
);
7173 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
            /* Stop translation as we may have triggered an interrupt */
7177 ctx
->bstate
= BS_STOP
;
7181 goto cp0_unimplemented
;
7187 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7191 goto cp0_unimplemented
;
7201 check_insn(ctx
, ISA_MIPS32R2
);
7202 gen_helper_mtc0_ebase(cpu_env
, arg
);
7206 goto cp0_unimplemented
;
7212 gen_helper_mtc0_config0(cpu_env
, arg
);
7214 /* Stop translation as we may have switched the execution mode */
7215 ctx
->bstate
= BS_STOP
;
7218 /* ignored, read only */
7222 gen_helper_mtc0_config2(cpu_env
, arg
);
7224 /* Stop translation as we may have switched the execution mode */
7225 ctx
->bstate
= BS_STOP
;
7228 gen_helper_mtc0_config3(cpu_env
, arg
);
7230 /* Stop translation as we may have switched the execution mode */
7231 ctx
->bstate
= BS_STOP
;
7234 /* currently ignored */
7238 gen_helper_mtc0_config5(cpu_env
, arg
);
7240 /* Stop translation as we may have switched the execution mode */
7241 ctx
->bstate
= BS_STOP
;
7243 /* 6,7 are implementation dependent */
7245 rn
= "Invalid config selector";
7246 goto cp0_unimplemented
;
7252 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7256 goto cp0_unimplemented
;
7262 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7266 goto cp0_unimplemented
;
7272 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7276 goto cp0_unimplemented
;
7282 check_insn(ctx
, ISA_MIPS3
);
7283 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7287 goto cp0_unimplemented
;
7291 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7292 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7295 gen_helper_mtc0_framemask(cpu_env
, arg
);
7299 goto cp0_unimplemented
;
7304 rn
= "Diagnostic"; /* implementation dependent */
7309 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7310 /* BS_STOP isn't good enough here, hflags may have changed. */
7311 gen_save_pc(ctx
->pc
+ 4);
7312 ctx
->bstate
= BS_EXCP
;
7316 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7317 /* Stop translation as we may have switched the execution mode */
7318 ctx
->bstate
= BS_STOP
;
7319 rn
= "TraceControl";
7322 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7323 /* Stop translation as we may have switched the execution mode */
7324 ctx
->bstate
= BS_STOP
;
7325 rn
= "TraceControl2";
7328 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7329 /* Stop translation as we may have switched the execution mode */
7330 ctx
->bstate
= BS_STOP
;
7331 rn
= "UserTraceData";
7334 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7335 /* Stop translation as we may have switched the execution mode */
7336 ctx
->bstate
= BS_STOP
;
7340 goto cp0_unimplemented
;
7347 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7351 goto cp0_unimplemented
;
7357 gen_helper_mtc0_performance0(cpu_env
, arg
);
7358 rn
= "Performance0";
7361 // gen_helper_mtc0_performance1(cpu_env, arg);
7362 rn
= "Performance1";
7365 // gen_helper_mtc0_performance2(cpu_env, arg);
7366 rn
= "Performance2";
7369 // gen_helper_mtc0_performance3(cpu_env, arg);
7370 rn
= "Performance3";
7373 // gen_helper_mtc0_performance4(cpu_env, arg);
7374 rn
= "Performance4";
7377 // gen_helper_mtc0_performance5(cpu_env, arg);
7378 rn
= "Performance5";
7381 // gen_helper_mtc0_performance6(cpu_env, arg);
7382 rn
= "Performance6";
7385 // gen_helper_mtc0_performance7(cpu_env, arg);
7386 rn
= "Performance7";
7389 goto cp0_unimplemented
;
7403 goto cp0_unimplemented
;
7412 gen_helper_mtc0_taglo(cpu_env
, arg
);
7419 gen_helper_mtc0_datalo(cpu_env
, arg
);
7423 goto cp0_unimplemented
;
7432 gen_helper_mtc0_taghi(cpu_env
, arg
);
7439 gen_helper_mtc0_datahi(cpu_env
, arg
);
7444 goto cp0_unimplemented
;
7450 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7454 goto cp0_unimplemented
;
7461 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7465 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7466 tcg_gen_st_tl(arg
, cpu_env
,
7467 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7471 goto cp0_unimplemented
;
7473 /* Stop translation as we may have switched the execution mode */
7474 ctx
->bstate
= BS_STOP
;
7477 goto cp0_unimplemented
;
7479 (void)rn
; /* avoid a compiler warning */
7480 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7481 /* For simplicity assume that all writes can cause interrupts. */
7482 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7484 ctx
->bstate
= BS_STOP
;
7489 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7491 #endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
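    /* Note: MFTR targets another thread context, so the two guards above
       reject a target TC that lives on a different VPE (unless this is the
       master VPE) or whose number exceeds the processor's TC count; in
       either case the destination simply receives -1 instead of a value
       read from the other context. */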
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
{
    const char *opn = "ldst";

    check_cp0_enabled(ctx);

        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);

#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

        check_insn(ctx, ISA_MIPS3);

        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);

        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);

        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);

        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);

        if (!env->tlb->helper_tlbwi)
            goto die;
        gen_helper_tlbwi(cpu_env);

        if (!env->tlb->helper_tlbinv) {
            goto die;
        }
        gen_helper_tlbinv(cpu_env);
        /* treat as nop if TLBINV not supported */

        if (!env->tlb->helper_tlbinvf) {
            goto die;
        }
        gen_helper_tlbinvf(cpu_env);
        /* treat as nop if TLBINV not supported */

        if (!env->tlb->helper_tlbwr)
            goto die;
        gen_helper_tlbwr(cpu_env);

        if (!env->tlb->helper_tlbp)
            goto die;
        gen_helper_tlbp(cpu_env);

        if (!env->tlb->helper_tlbr)
            goto die;
        gen_helper_tlbr(cpu_env);

    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            goto die;
        } else {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
            } else {
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            }
            ctx->bstate = BS_EXCP;
        }

        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            goto die;
        }
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
        } else {
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        }

        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            goto die;
        }
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;

die:
        generate_exception_end(ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
}
#endif /* !CONFIG_USER_ONLY */
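/*
 * Note (added summary of the code below): gen_compute_branch1() handles
 * the pre-R6 CP1 conditional branches.  The condition comes from the FCC
 * bit(s) of FCR31 selected by get_fp_bit(cc), the boolean result is left
 * in 'bcond', and the branch target is ctx->pc + 4 + offset, i.e. it is
 * computed relative to the delay-slot instruction.
 */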
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    }

    check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        ctx->hflags |= MIPS_HFLAG_BL;

        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);

        ctx->hflags |= MIPS_HFLAG_BC;

        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;

    tcg_temp_free_i32(t0);
}
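/*
 * Note (added summary): the R6 CP1 compact branches handled below
 * (BC1EQZ/BC1NEZ) do not use FCC bits; they test bit 0 of FPR ft, as the
 * andi/xori on t0 shows, and they raise a Reserved Instruction exception
 * when they appear in a delay or forbidden slot.
 */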
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
    }

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);

        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;

        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;

        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    }

    tcg_temp_free_i64(t0);
}
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

enum fopcode {
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
};
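/*
 * Worked example of the FOP() encoding above (added note): the value is
 * (fmt << 21) | func, so for instance
 *     OPC_ADD_S == FOP(0, FMT_S) == (FMT_S << 21) | 0
 *     OPC_ADD_D == FOP(0, FMT_D) == (FMT_D << 21) | 0
 * The R6 compare operations reuse the FMT_W/FMT_L format codes with their
 * own small function numbers (R6_OPC_CMP_*), which is why they are listed
 * separately.
 */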
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();

        TCGv_i32 fp0 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);

        gen_store_gpr(t0, rt);

        gen_load_gpr(t0, rt);

        TCGv_i32 fp0 = tcg_temp_new_i32();

        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);

        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);

        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);

        TCGv_i32 fs_tmp = tcg_const_i32(fs);

        gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
        tcg_temp_free_i32(fs_tmp);

        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);

        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
#endif
        TCGv_i32 fp0 = tcg_temp_new_i32();

        gen_load_fpr32h(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);

        gen_store_gpr(t0, rt);

        gen_load_gpr(t0, rt);

        TCGv_i32 fp0 = tcg_temp_new_i32();

        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);

        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
}
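/*
 * Note (added summary): gen_cp1() above covers the GPR <-> FPR move
 * instructions (MFC1/MTC1, CFC1/CTC1 and, on 64-bit targets, the
 * doubleword and high-half variants).  CTC1 can change hflags, so
 * translation is stopped with BS_STOP after it.
 */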
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);

    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
}

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);

    tcg_temp_free_i32(t0);
}

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
}

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
}
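/*
 * Note (added summary): the gen_sel_*() helpers below implement the R6
 * SEL.fmt, SELEQZ.fmt and SELNEZ.fmt operations branch-free, using a
 * single tcg_gen_movcond on bit 0 of the condition operand.
 */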
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);

        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);

        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);

        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
}

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);

        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);

        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);

        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
}
8723 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8724 int ft
, int fs
, int fd
, int cc
)
8726 uint32_t func
= ctx
->opcode
& 0x3f;
8730 TCGv_i32 fp0
= tcg_temp_new_i32();
8731 TCGv_i32 fp1
= tcg_temp_new_i32();
8733 gen_load_fpr32(ctx
, fp0
, fs
);
8734 gen_load_fpr32(ctx
, fp1
, ft
);
8735 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8736 tcg_temp_free_i32(fp1
);
8737 gen_store_fpr32(ctx
, fp0
, fd
);
8738 tcg_temp_free_i32(fp0
);
8743 TCGv_i32 fp0
= tcg_temp_new_i32();
8744 TCGv_i32 fp1
= tcg_temp_new_i32();
8746 gen_load_fpr32(ctx
, fp0
, fs
);
8747 gen_load_fpr32(ctx
, fp1
, ft
);
8748 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8749 tcg_temp_free_i32(fp1
);
8750 gen_store_fpr32(ctx
, fp0
, fd
);
8751 tcg_temp_free_i32(fp0
);
8756 TCGv_i32 fp0
= tcg_temp_new_i32();
8757 TCGv_i32 fp1
= tcg_temp_new_i32();
8759 gen_load_fpr32(ctx
, fp0
, fs
);
8760 gen_load_fpr32(ctx
, fp1
, ft
);
8761 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8762 tcg_temp_free_i32(fp1
);
8763 gen_store_fpr32(ctx
, fp0
, fd
);
8764 tcg_temp_free_i32(fp0
);
8769 TCGv_i32 fp0
= tcg_temp_new_i32();
8770 TCGv_i32 fp1
= tcg_temp_new_i32();
8772 gen_load_fpr32(ctx
, fp0
, fs
);
8773 gen_load_fpr32(ctx
, fp1
, ft
);
8774 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8775 tcg_temp_free_i32(fp1
);
8776 gen_store_fpr32(ctx
, fp0
, fd
);
8777 tcg_temp_free_i32(fp0
);
8782 TCGv_i32 fp0
= tcg_temp_new_i32();
8784 gen_load_fpr32(ctx
, fp0
, fs
);
8785 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8786 gen_store_fpr32(ctx
, fp0
, fd
);
8787 tcg_temp_free_i32(fp0
);
8792 TCGv_i32 fp0
= tcg_temp_new_i32();
8794 gen_load_fpr32(ctx
, fp0
, fs
);
8795 gen_helper_float_abs_s(fp0
, fp0
);
8796 gen_store_fpr32(ctx
, fp0
, fd
);
8797 tcg_temp_free_i32(fp0
);
8802 TCGv_i32 fp0
= tcg_temp_new_i32();
8804 gen_load_fpr32(ctx
, fp0
, fs
);
8805 gen_store_fpr32(ctx
, fp0
, fd
);
8806 tcg_temp_free_i32(fp0
);
8811 TCGv_i32 fp0
= tcg_temp_new_i32();
8813 gen_load_fpr32(ctx
, fp0
, fs
);
8814 gen_helper_float_chs_s(fp0
, fp0
);
8815 gen_store_fpr32(ctx
, fp0
, fd
);
8816 tcg_temp_free_i32(fp0
);
8820 check_cp1_64bitmode(ctx
);
8822 TCGv_i32 fp32
= tcg_temp_new_i32();
8823 TCGv_i64 fp64
= tcg_temp_new_i64();
8825 gen_load_fpr32(ctx
, fp32
, fs
);
8826 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8827 tcg_temp_free_i32(fp32
);
8828 gen_store_fpr64(ctx
, fp64
, fd
);
8829 tcg_temp_free_i64(fp64
);
8833 check_cp1_64bitmode(ctx
);
8835 TCGv_i32 fp32
= tcg_temp_new_i32();
8836 TCGv_i64 fp64
= tcg_temp_new_i64();
8838 gen_load_fpr32(ctx
, fp32
, fs
);
8839 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8840 tcg_temp_free_i32(fp32
);
8841 gen_store_fpr64(ctx
, fp64
, fd
);
8842 tcg_temp_free_i64(fp64
);
8846 check_cp1_64bitmode(ctx
);
8848 TCGv_i32 fp32
= tcg_temp_new_i32();
8849 TCGv_i64 fp64
= tcg_temp_new_i64();
8851 gen_load_fpr32(ctx
, fp32
, fs
);
8852 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8853 tcg_temp_free_i32(fp32
);
8854 gen_store_fpr64(ctx
, fp64
, fd
);
8855 tcg_temp_free_i64(fp64
);
8859 check_cp1_64bitmode(ctx
);
8861 TCGv_i32 fp32
= tcg_temp_new_i32();
8862 TCGv_i64 fp64
= tcg_temp_new_i64();
8864 gen_load_fpr32(ctx
, fp32
, fs
);
8865 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8866 tcg_temp_free_i32(fp32
);
8867 gen_store_fpr64(ctx
, fp64
, fd
);
8868 tcg_temp_free_i64(fp64
);
8873 TCGv_i32 fp0
= tcg_temp_new_i32();
8875 gen_load_fpr32(ctx
, fp0
, fs
);
8876 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8877 gen_store_fpr32(ctx
, fp0
, fd
);
8878 tcg_temp_free_i32(fp0
);
8883 TCGv_i32 fp0
= tcg_temp_new_i32();
8885 gen_load_fpr32(ctx
, fp0
, fs
);
8886 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8887 gen_store_fpr32(ctx
, fp0
, fd
);
8888 tcg_temp_free_i32(fp0
);
8893 TCGv_i32 fp0
= tcg_temp_new_i32();
8895 gen_load_fpr32(ctx
, fp0
, fs
);
8896 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8897 gen_store_fpr32(ctx
, fp0
, fd
);
8898 tcg_temp_free_i32(fp0
);
8903 TCGv_i32 fp0
= tcg_temp_new_i32();
8905 gen_load_fpr32(ctx
, fp0
, fs
);
8906 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8907 gen_store_fpr32(ctx
, fp0
, fd
);
8908 tcg_temp_free_i32(fp0
);
8912 check_insn(ctx
, ISA_MIPS32R6
);
8913 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8916 check_insn(ctx
, ISA_MIPS32R6
);
8917 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8920 check_insn(ctx
, ISA_MIPS32R6
);
8921 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8924 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8925 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8928 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8930 TCGLabel
*l1
= gen_new_label();
8934 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8936 fp0
= tcg_temp_new_i32();
8937 gen_load_fpr32(ctx
, fp0
, fs
);
8938 gen_store_fpr32(ctx
, fp0
, fd
);
8939 tcg_temp_free_i32(fp0
);
8944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8946 TCGLabel
*l1
= gen_new_label();
8950 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8951 fp0
= tcg_temp_new_i32();
8952 gen_load_fpr32(ctx
, fp0
, fs
);
8953 gen_store_fpr32(ctx
, fp0
, fd
);
8954 tcg_temp_free_i32(fp0
);
8961 TCGv_i32 fp0
= tcg_temp_new_i32();
8963 gen_load_fpr32(ctx
, fp0
, fs
);
8964 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8965 gen_store_fpr32(ctx
, fp0
, fd
);
8966 tcg_temp_free_i32(fp0
);
8971 TCGv_i32 fp0
= tcg_temp_new_i32();
8973 gen_load_fpr32(ctx
, fp0
, fs
);
8974 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8975 gen_store_fpr32(ctx
, fp0
, fd
);
8976 tcg_temp_free_i32(fp0
);
8980 check_insn(ctx
, ISA_MIPS32R6
);
8982 TCGv_i32 fp0
= tcg_temp_new_i32();
8983 TCGv_i32 fp1
= tcg_temp_new_i32();
8984 TCGv_i32 fp2
= tcg_temp_new_i32();
8985 gen_load_fpr32(ctx
, fp0
, fs
);
8986 gen_load_fpr32(ctx
, fp1
, ft
);
8987 gen_load_fpr32(ctx
, fp2
, fd
);
8988 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8989 gen_store_fpr32(ctx
, fp2
, fd
);
8990 tcg_temp_free_i32(fp2
);
8991 tcg_temp_free_i32(fp1
);
8992 tcg_temp_free_i32(fp0
);
8996 check_insn(ctx
, ISA_MIPS32R6
);
8998 TCGv_i32 fp0
= tcg_temp_new_i32();
8999 TCGv_i32 fp1
= tcg_temp_new_i32();
9000 TCGv_i32 fp2
= tcg_temp_new_i32();
9001 gen_load_fpr32(ctx
, fp0
, fs
);
9002 gen_load_fpr32(ctx
, fp1
, ft
);
9003 gen_load_fpr32(ctx
, fp2
, fd
);
9004 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9005 gen_store_fpr32(ctx
, fp2
, fd
);
9006 tcg_temp_free_i32(fp2
);
9007 tcg_temp_free_i32(fp1
);
9008 tcg_temp_free_i32(fp0
);
9012 check_insn(ctx
, ISA_MIPS32R6
);
9014 TCGv_i32 fp0
= tcg_temp_new_i32();
9015 gen_load_fpr32(ctx
, fp0
, fs
);
9016 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9017 gen_store_fpr32(ctx
, fp0
, fd
);
9018 tcg_temp_free_i32(fp0
);
9022 check_insn(ctx
, ISA_MIPS32R6
);
9024 TCGv_i32 fp0
= tcg_temp_new_i32();
9025 gen_load_fpr32(ctx
, fp0
, fs
);
9026 gen_helper_float_class_s(fp0
, fp0
);
9027 gen_store_fpr32(ctx
, fp0
, fd
);
9028 tcg_temp_free_i32(fp0
);
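    /*
     * Note (added): from here on several single-precision function codes
     * are shared between R6 operations (MIN.S, MINA.S, MAX.S, MAXA.S) and
     * the older RECIP2/RECIP1/RSQRT1/RSQRT2 forms; the test on
     * ctx->insn_flags & ISA_MIPS32R6 selects which meaning applies, as the
     * case comments indicate.
     */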
9031 case OPC_MIN_S
: /* OPC_RECIP2_S */
9032 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9034 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 TCGv_i32 fp1
= tcg_temp_new_i32();
9036 TCGv_i32 fp2
= tcg_temp_new_i32();
9037 gen_load_fpr32(ctx
, fp0
, fs
);
9038 gen_load_fpr32(ctx
, fp1
, ft
);
9039 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9040 gen_store_fpr32(ctx
, fp2
, fd
);
9041 tcg_temp_free_i32(fp2
);
9042 tcg_temp_free_i32(fp1
);
9043 tcg_temp_free_i32(fp0
);
9046 check_cp1_64bitmode(ctx
);
9048 TCGv_i32 fp0
= tcg_temp_new_i32();
9049 TCGv_i32 fp1
= tcg_temp_new_i32();
9051 gen_load_fpr32(ctx
, fp0
, fs
);
9052 gen_load_fpr32(ctx
, fp1
, ft
);
9053 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9054 tcg_temp_free_i32(fp1
);
9055 gen_store_fpr32(ctx
, fp0
, fd
);
9056 tcg_temp_free_i32(fp0
);
9060 case OPC_MINA_S
: /* OPC_RECIP1_S */
9061 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9063 TCGv_i32 fp0
= tcg_temp_new_i32();
9064 TCGv_i32 fp1
= tcg_temp_new_i32();
9065 TCGv_i32 fp2
= tcg_temp_new_i32();
9066 gen_load_fpr32(ctx
, fp0
, fs
);
9067 gen_load_fpr32(ctx
, fp1
, ft
);
9068 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9069 gen_store_fpr32(ctx
, fp2
, fd
);
9070 tcg_temp_free_i32(fp2
);
9071 tcg_temp_free_i32(fp1
);
9072 tcg_temp_free_i32(fp0
);
9075 check_cp1_64bitmode(ctx
);
9077 TCGv_i32 fp0
= tcg_temp_new_i32();
9079 gen_load_fpr32(ctx
, fp0
, fs
);
9080 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9081 gen_store_fpr32(ctx
, fp0
, fd
);
9082 tcg_temp_free_i32(fp0
);
9086 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9087 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9089 TCGv_i32 fp0
= tcg_temp_new_i32();
9090 TCGv_i32 fp1
= tcg_temp_new_i32();
9091 gen_load_fpr32(ctx
, fp0
, fs
);
9092 gen_load_fpr32(ctx
, fp1
, ft
);
9093 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9094 gen_store_fpr32(ctx
, fp1
, fd
);
9095 tcg_temp_free_i32(fp1
);
9096 tcg_temp_free_i32(fp0
);
9099 check_cp1_64bitmode(ctx
);
9101 TCGv_i32 fp0
= tcg_temp_new_i32();
9103 gen_load_fpr32(ctx
, fp0
, fs
);
9104 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9105 gen_store_fpr32(ctx
, fp0
, fd
);
9106 tcg_temp_free_i32(fp0
);
9110 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9111 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9113 TCGv_i32 fp0
= tcg_temp_new_i32();
9114 TCGv_i32 fp1
= tcg_temp_new_i32();
9115 gen_load_fpr32(ctx
, fp0
, fs
);
9116 gen_load_fpr32(ctx
, fp1
, ft
);
9117 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9118 gen_store_fpr32(ctx
, fp1
, fd
);
9119 tcg_temp_free_i32(fp1
);
9120 tcg_temp_free_i32(fp0
);
9123 check_cp1_64bitmode(ctx
);
9125 TCGv_i32 fp0
= tcg_temp_new_i32();
9126 TCGv_i32 fp1
= tcg_temp_new_i32();
9128 gen_load_fpr32(ctx
, fp0
, fs
);
9129 gen_load_fpr32(ctx
, fp1
, ft
);
9130 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9131 tcg_temp_free_i32(fp1
);
9132 gen_store_fpr32(ctx
, fp0
, fd
);
9133 tcg_temp_free_i32(fp0
);
9138 check_cp1_registers(ctx
, fd
);
9140 TCGv_i32 fp32
= tcg_temp_new_i32();
9141 TCGv_i64 fp64
= tcg_temp_new_i64();
9143 gen_load_fpr32(ctx
, fp32
, fs
);
9144 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9145 tcg_temp_free_i32(fp32
);
9146 gen_store_fpr64(ctx
, fp64
, fd
);
9147 tcg_temp_free_i64(fp64
);
9152 TCGv_i32 fp0
= tcg_temp_new_i32();
9154 gen_load_fpr32(ctx
, fp0
, fs
);
9155 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9156 gen_store_fpr32(ctx
, fp0
, fd
);
9157 tcg_temp_free_i32(fp0
);
9161 check_cp1_64bitmode(ctx
);
9163 TCGv_i32 fp32
= tcg_temp_new_i32();
9164 TCGv_i64 fp64
= tcg_temp_new_i64();
9166 gen_load_fpr32(ctx
, fp32
, fs
);
9167 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9168 tcg_temp_free_i32(fp32
);
9169 gen_store_fpr64(ctx
, fp64
, fd
);
9170 tcg_temp_free_i64(fp64
);
9176 TCGv_i64 fp64
= tcg_temp_new_i64();
9177 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9178 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9180 gen_load_fpr32(ctx
, fp32_0
, fs
);
9181 gen_load_fpr32(ctx
, fp32_1
, ft
);
9182 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9183 tcg_temp_free_i32(fp32_1
);
9184 tcg_temp_free_i32(fp32_0
);
9185 gen_store_fpr64(ctx
, fp64
, fd
);
9186 tcg_temp_free_i64(fp64
);
9198 case OPC_CMP_NGLE_S
:
9205 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9206 if (ctx
->opcode
& (1 << 6)) {
9207 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9209 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9213 check_cp1_registers(ctx
, fs
| ft
| fd
);
9215 TCGv_i64 fp0
= tcg_temp_new_i64();
9216 TCGv_i64 fp1
= tcg_temp_new_i64();
9218 gen_load_fpr64(ctx
, fp0
, fs
);
9219 gen_load_fpr64(ctx
, fp1
, ft
);
9220 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9221 tcg_temp_free_i64(fp1
);
9222 gen_store_fpr64(ctx
, fp0
, fd
);
9223 tcg_temp_free_i64(fp0
);
9227 check_cp1_registers(ctx
, fs
| ft
| fd
);
9229 TCGv_i64 fp0
= tcg_temp_new_i64();
9230 TCGv_i64 fp1
= tcg_temp_new_i64();
9232 gen_load_fpr64(ctx
, fp0
, fs
);
9233 gen_load_fpr64(ctx
, fp1
, ft
);
9234 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9235 tcg_temp_free_i64(fp1
);
9236 gen_store_fpr64(ctx
, fp0
, fd
);
9237 tcg_temp_free_i64(fp0
);
9241 check_cp1_registers(ctx
, fs
| ft
| fd
);
9243 TCGv_i64 fp0
= tcg_temp_new_i64();
9244 TCGv_i64 fp1
= tcg_temp_new_i64();
9246 gen_load_fpr64(ctx
, fp0
, fs
);
9247 gen_load_fpr64(ctx
, fp1
, ft
);
9248 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9249 tcg_temp_free_i64(fp1
);
9250 gen_store_fpr64(ctx
, fp0
, fd
);
9251 tcg_temp_free_i64(fp0
);
9255 check_cp1_registers(ctx
, fs
| ft
| fd
);
9257 TCGv_i64 fp0
= tcg_temp_new_i64();
9258 TCGv_i64 fp1
= tcg_temp_new_i64();
9260 gen_load_fpr64(ctx
, fp0
, fs
);
9261 gen_load_fpr64(ctx
, fp1
, ft
);
9262 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9263 tcg_temp_free_i64(fp1
);
9264 gen_store_fpr64(ctx
, fp0
, fd
);
9265 tcg_temp_free_i64(fp0
);
9269 check_cp1_registers(ctx
, fs
| fd
);
9271 TCGv_i64 fp0
= tcg_temp_new_i64();
9273 gen_load_fpr64(ctx
, fp0
, fs
);
9274 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9275 gen_store_fpr64(ctx
, fp0
, fd
);
9276 tcg_temp_free_i64(fp0
);
9280 check_cp1_registers(ctx
, fs
| fd
);
9282 TCGv_i64 fp0
= tcg_temp_new_i64();
9284 gen_load_fpr64(ctx
, fp0
, fs
);
9285 gen_helper_float_abs_d(fp0
, fp0
);
9286 gen_store_fpr64(ctx
, fp0
, fd
);
9287 tcg_temp_free_i64(fp0
);
9291 check_cp1_registers(ctx
, fs
| fd
);
9293 TCGv_i64 fp0
= tcg_temp_new_i64();
9295 gen_load_fpr64(ctx
, fp0
, fs
);
9296 gen_store_fpr64(ctx
, fp0
, fd
);
9297 tcg_temp_free_i64(fp0
);
9301 check_cp1_registers(ctx
, fs
| fd
);
9303 TCGv_i64 fp0
= tcg_temp_new_i64();
9305 gen_load_fpr64(ctx
, fp0
, fs
);
9306 gen_helper_float_chs_d(fp0
, fp0
);
9307 gen_store_fpr64(ctx
, fp0
, fd
);
9308 tcg_temp_free_i64(fp0
);
9312 check_cp1_64bitmode(ctx
);
9314 TCGv_i64 fp0
= tcg_temp_new_i64();
9316 gen_load_fpr64(ctx
, fp0
, fs
);
9317 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9318 gen_store_fpr64(ctx
, fp0
, fd
);
9319 tcg_temp_free_i64(fp0
);
9323 check_cp1_64bitmode(ctx
);
9325 TCGv_i64 fp0
= tcg_temp_new_i64();
9327 gen_load_fpr64(ctx
, fp0
, fs
);
9328 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9329 gen_store_fpr64(ctx
, fp0
, fd
);
9330 tcg_temp_free_i64(fp0
);
9334 check_cp1_64bitmode(ctx
);
9336 TCGv_i64 fp0
= tcg_temp_new_i64();
9338 gen_load_fpr64(ctx
, fp0
, fs
);
9339 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9340 gen_store_fpr64(ctx
, fp0
, fd
);
9341 tcg_temp_free_i64(fp0
);
9345 check_cp1_64bitmode(ctx
);
9347 TCGv_i64 fp0
= tcg_temp_new_i64();
9349 gen_load_fpr64(ctx
, fp0
, fs
);
9350 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9351 gen_store_fpr64(ctx
, fp0
, fd
);
9352 tcg_temp_free_i64(fp0
);
9356 check_cp1_registers(ctx
, fs
);
9358 TCGv_i32 fp32
= tcg_temp_new_i32();
9359 TCGv_i64 fp64
= tcg_temp_new_i64();
9361 gen_load_fpr64(ctx
, fp64
, fs
);
9362 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9363 tcg_temp_free_i64(fp64
);
9364 gen_store_fpr32(ctx
, fp32
, fd
);
9365 tcg_temp_free_i32(fp32
);
9369 check_cp1_registers(ctx
, fs
);
9371 TCGv_i32 fp32
= tcg_temp_new_i32();
9372 TCGv_i64 fp64
= tcg_temp_new_i64();
9374 gen_load_fpr64(ctx
, fp64
, fs
);
9375 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9376 tcg_temp_free_i64(fp64
);
9377 gen_store_fpr32(ctx
, fp32
, fd
);
9378 tcg_temp_free_i32(fp32
);
9382 check_cp1_registers(ctx
, fs
);
9384 TCGv_i32 fp32
= tcg_temp_new_i32();
9385 TCGv_i64 fp64
= tcg_temp_new_i64();
9387 gen_load_fpr64(ctx
, fp64
, fs
);
9388 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9389 tcg_temp_free_i64(fp64
);
9390 gen_store_fpr32(ctx
, fp32
, fd
);
9391 tcg_temp_free_i32(fp32
);
9395 check_cp1_registers(ctx
, fs
);
9397 TCGv_i32 fp32
= tcg_temp_new_i32();
9398 TCGv_i64 fp64
= tcg_temp_new_i64();
9400 gen_load_fpr64(ctx
, fp64
, fs
);
9401 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9402 tcg_temp_free_i64(fp64
);
9403 gen_store_fpr32(ctx
, fp32
, fd
);
9404 tcg_temp_free_i32(fp32
);
9408 check_insn(ctx
, ISA_MIPS32R6
);
9409 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9412 check_insn(ctx
, ISA_MIPS32R6
);
9413 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9416 check_insn(ctx
, ISA_MIPS32R6
);
9417 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9420 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9421 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9426 TCGLabel
*l1
= gen_new_label();
9430 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9432 fp0
= tcg_temp_new_i64();
9433 gen_load_fpr64(ctx
, fp0
, fs
);
9434 gen_store_fpr64(ctx
, fp0
, fd
);
9435 tcg_temp_free_i64(fp0
);
9440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9442 TCGLabel
*l1
= gen_new_label();
9446 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9447 fp0
= tcg_temp_new_i64();
9448 gen_load_fpr64(ctx
, fp0
, fs
);
9449 gen_store_fpr64(ctx
, fp0
, fd
);
9450 tcg_temp_free_i64(fp0
);
9456 check_cp1_registers(ctx
, fs
| fd
);
9458 TCGv_i64 fp0
= tcg_temp_new_i64();
9460 gen_load_fpr64(ctx
, fp0
, fs
);
9461 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9462 gen_store_fpr64(ctx
, fp0
, fd
);
9463 tcg_temp_free_i64(fp0
);
9467 check_cp1_registers(ctx
, fs
| fd
);
9469 TCGv_i64 fp0
= tcg_temp_new_i64();
9471 gen_load_fpr64(ctx
, fp0
, fs
);
9472 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9473 gen_store_fpr64(ctx
, fp0
, fd
);
9474 tcg_temp_free_i64(fp0
);
9478 check_insn(ctx
, ISA_MIPS32R6
);
9480 TCGv_i64 fp0
= tcg_temp_new_i64();
9481 TCGv_i64 fp1
= tcg_temp_new_i64();
9482 TCGv_i64 fp2
= tcg_temp_new_i64();
9483 gen_load_fpr64(ctx
, fp0
, fs
);
9484 gen_load_fpr64(ctx
, fp1
, ft
);
9485 gen_load_fpr64(ctx
, fp2
, fd
);
9486 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9487 gen_store_fpr64(ctx
, fp2
, fd
);
9488 tcg_temp_free_i64(fp2
);
9489 tcg_temp_free_i64(fp1
);
9490 tcg_temp_free_i64(fp0
);
9494 check_insn(ctx
, ISA_MIPS32R6
);
9496 TCGv_i64 fp0
= tcg_temp_new_i64();
9497 TCGv_i64 fp1
= tcg_temp_new_i64();
9498 TCGv_i64 fp2
= tcg_temp_new_i64();
9499 gen_load_fpr64(ctx
, fp0
, fs
);
9500 gen_load_fpr64(ctx
, fp1
, ft
);
9501 gen_load_fpr64(ctx
, fp2
, fd
);
9502 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9503 gen_store_fpr64(ctx
, fp2
, fd
);
9504 tcg_temp_free_i64(fp2
);
9505 tcg_temp_free_i64(fp1
);
9506 tcg_temp_free_i64(fp0
);
9510 check_insn(ctx
, ISA_MIPS32R6
);
9512 TCGv_i64 fp0
= tcg_temp_new_i64();
9513 gen_load_fpr64(ctx
, fp0
, fs
);
9514 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9515 gen_store_fpr64(ctx
, fp0
, fd
);
9516 tcg_temp_free_i64(fp0
);
9520 check_insn(ctx
, ISA_MIPS32R6
);
9522 TCGv_i64 fp0
= tcg_temp_new_i64();
9523 gen_load_fpr64(ctx
, fp0
, fs
);
9524 gen_helper_float_class_d(fp0
, fp0
);
9525 gen_store_fpr64(ctx
, fp0
, fd
);
9526 tcg_temp_free_i64(fp0
);
9529 case OPC_MIN_D
: /* OPC_RECIP2_D */
9530 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9532 TCGv_i64 fp0
= tcg_temp_new_i64();
9533 TCGv_i64 fp1
= tcg_temp_new_i64();
9534 gen_load_fpr64(ctx
, fp0
, fs
);
9535 gen_load_fpr64(ctx
, fp1
, ft
);
9536 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9537 gen_store_fpr64(ctx
, fp1
, fd
);
9538 tcg_temp_free_i64(fp1
);
9539 tcg_temp_free_i64(fp0
);
9542 check_cp1_64bitmode(ctx
);
9544 TCGv_i64 fp0
= tcg_temp_new_i64();
9545 TCGv_i64 fp1
= tcg_temp_new_i64();
9547 gen_load_fpr64(ctx
, fp0
, fs
);
9548 gen_load_fpr64(ctx
, fp1
, ft
);
9549 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9550 tcg_temp_free_i64(fp1
);
9551 gen_store_fpr64(ctx
, fp0
, fd
);
9552 tcg_temp_free_i64(fp0
);
9556 case OPC_MINA_D
: /* OPC_RECIP1_D */
9557 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9559 TCGv_i64 fp0
= tcg_temp_new_i64();
9560 TCGv_i64 fp1
= tcg_temp_new_i64();
9561 gen_load_fpr64(ctx
, fp0
, fs
);
9562 gen_load_fpr64(ctx
, fp1
, ft
);
9563 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9564 gen_store_fpr64(ctx
, fp1
, fd
);
9565 tcg_temp_free_i64(fp1
);
9566 tcg_temp_free_i64(fp0
);
9569 check_cp1_64bitmode(ctx
);
9571 TCGv_i64 fp0
= tcg_temp_new_i64();
9573 gen_load_fpr64(ctx
, fp0
, fs
);
9574 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9575 gen_store_fpr64(ctx
, fp0
, fd
);
9576 tcg_temp_free_i64(fp0
);
9580 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9581 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9583 TCGv_i64 fp0
= tcg_temp_new_i64();
9584 TCGv_i64 fp1
= tcg_temp_new_i64();
9585 gen_load_fpr64(ctx
, fp0
, fs
);
9586 gen_load_fpr64(ctx
, fp1
, ft
);
9587 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9588 gen_store_fpr64(ctx
, fp1
, fd
);
9589 tcg_temp_free_i64(fp1
);
9590 tcg_temp_free_i64(fp0
);
9593 check_cp1_64bitmode(ctx
);
9595 TCGv_i64 fp0
= tcg_temp_new_i64();
9597 gen_load_fpr64(ctx
, fp0
, fs
);
9598 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9599 gen_store_fpr64(ctx
, fp0
, fd
);
9600 tcg_temp_free_i64(fp0
);
9604 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9605 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9607 TCGv_i64 fp0
= tcg_temp_new_i64();
9608 TCGv_i64 fp1
= tcg_temp_new_i64();
9609 gen_load_fpr64(ctx
, fp0
, fs
);
9610 gen_load_fpr64(ctx
, fp1
, ft
);
9611 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9612 gen_store_fpr64(ctx
, fp1
, fd
);
9613 tcg_temp_free_i64(fp1
);
9614 tcg_temp_free_i64(fp0
);
9617 check_cp1_64bitmode(ctx
);
9619 TCGv_i64 fp0
= tcg_temp_new_i64();
9620 TCGv_i64 fp1
= tcg_temp_new_i64();
9622 gen_load_fpr64(ctx
, fp0
, fs
);
9623 gen_load_fpr64(ctx
, fp1
, ft
);
9624 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9625 tcg_temp_free_i64(fp1
);
9626 gen_store_fpr64(ctx
, fp0
, fd
);
9627 tcg_temp_free_i64(fp0
);
9640 case OPC_CMP_NGLE_D
:
9647 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9648 if (ctx
->opcode
& (1 << 6)) {
9649 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9651 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9655 check_cp1_registers(ctx
, fs
);
9657 TCGv_i32 fp32
= tcg_temp_new_i32();
9658 TCGv_i64 fp64
= tcg_temp_new_i64();
9660 gen_load_fpr64(ctx
, fp64
, fs
);
9661 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9662 tcg_temp_free_i64(fp64
);
9663 gen_store_fpr32(ctx
, fp32
, fd
);
9664 tcg_temp_free_i32(fp32
);
9668 check_cp1_registers(ctx
, fs
);
9670 TCGv_i32 fp32
= tcg_temp_new_i32();
9671 TCGv_i64 fp64
= tcg_temp_new_i64();
9673 gen_load_fpr64(ctx
, fp64
, fs
);
9674 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9675 tcg_temp_free_i64(fp64
);
9676 gen_store_fpr32(ctx
, fp32
, fd
);
9677 tcg_temp_free_i32(fp32
);
9681 check_cp1_64bitmode(ctx
);
9683 TCGv_i64 fp0
= tcg_temp_new_i64();
9685 gen_load_fpr64(ctx
, fp0
, fs
);
9686 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9687 gen_store_fpr64(ctx
, fp0
, fd
);
9688 tcg_temp_free_i64(fp0
);
9693 TCGv_i32 fp0
= tcg_temp_new_i32();
9695 gen_load_fpr32(ctx
, fp0
, fs
);
9696 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9697 gen_store_fpr32(ctx
, fp0
, fd
);
9698 tcg_temp_free_i32(fp0
);
9702 check_cp1_registers(ctx
, fd
);
9704 TCGv_i32 fp32
= tcg_temp_new_i32();
9705 TCGv_i64 fp64
= tcg_temp_new_i64();
9707 gen_load_fpr32(ctx
, fp32
, fs
);
9708 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9709 tcg_temp_free_i32(fp32
);
9710 gen_store_fpr64(ctx
, fp64
, fd
);
9711 tcg_temp_free_i64(fp64
);
9715 check_cp1_64bitmode(ctx
);
9717 TCGv_i32 fp32
= tcg_temp_new_i32();
9718 TCGv_i64 fp64
= tcg_temp_new_i64();
9720 gen_load_fpr64(ctx
, fp64
, fs
);
9721 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9722 tcg_temp_free_i64(fp64
);
9723 gen_store_fpr32(ctx
, fp32
, fd
);
9724 tcg_temp_free_i32(fp32
);
9728 check_cp1_64bitmode(ctx
);
9730 TCGv_i64 fp0
= tcg_temp_new_i64();
9732 gen_load_fpr64(ctx
, fp0
, fs
);
9733 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9734 gen_store_fpr64(ctx
, fp0
, fd
);
9735 tcg_temp_free_i64(fp0
);
9741 TCGv_i64 fp0
= tcg_temp_new_i64();
9743 gen_load_fpr64(ctx
, fp0
, fs
);
9744 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9745 gen_store_fpr64(ctx
, fp0
, fd
);
9746 tcg_temp_free_i64(fp0
);
9752 TCGv_i64 fp0
= tcg_temp_new_i64();
9753 TCGv_i64 fp1
= tcg_temp_new_i64();
9755 gen_load_fpr64(ctx
, fp0
, fs
);
9756 gen_load_fpr64(ctx
, fp1
, ft
);
9757 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9758 tcg_temp_free_i64(fp1
);
9759 gen_store_fpr64(ctx
, fp0
, fd
);
9760 tcg_temp_free_i64(fp0
);
9766 TCGv_i64 fp0
= tcg_temp_new_i64();
9767 TCGv_i64 fp1
= tcg_temp_new_i64();
9769 gen_load_fpr64(ctx
, fp0
, fs
);
9770 gen_load_fpr64(ctx
, fp1
, ft
);
9771 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9772 tcg_temp_free_i64(fp1
);
9773 gen_store_fpr64(ctx
, fp0
, fd
);
9774 tcg_temp_free_i64(fp0
);
9780 TCGv_i64 fp0
= tcg_temp_new_i64();
9781 TCGv_i64 fp1
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_load_fpr64(ctx
, fp1
, ft
);
9785 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9786 tcg_temp_free_i64(fp1
);
9787 gen_store_fpr64(ctx
, fp0
, fd
);
9788 tcg_temp_free_i64(fp0
);
9794 TCGv_i64 fp0
= tcg_temp_new_i64();
9796 gen_load_fpr64(ctx
, fp0
, fs
);
9797 gen_helper_float_abs_ps(fp0
, fp0
);
9798 gen_store_fpr64(ctx
, fp0
, fd
);
9799 tcg_temp_free_i64(fp0
);
9805 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp0
, fs
);
9808 gen_store_fpr64(ctx
, fp0
, fd
);
9809 tcg_temp_free_i64(fp0
);
9815 TCGv_i64 fp0
= tcg_temp_new_i64();
9817 gen_load_fpr64(ctx
, fp0
, fs
);
9818 gen_helper_float_chs_ps(fp0
, fp0
);
9819 gen_store_fpr64(ctx
, fp0
, fd
);
9820 tcg_temp_free_i64(fp0
);
9825 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9830 TCGLabel
*l1
= gen_new_label();
9834 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9835 fp0
= tcg_temp_new_i64();
9836 gen_load_fpr64(ctx
, fp0
, fs
);
9837 gen_store_fpr64(ctx
, fp0
, fd
);
9838 tcg_temp_free_i64(fp0
);
9845 TCGLabel
*l1
= gen_new_label();
9849 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9850 fp0
= tcg_temp_new_i64();
9851 gen_load_fpr64(ctx
, fp0
, fs
);
9852 gen_store_fpr64(ctx
, fp0
, fd
);
9853 tcg_temp_free_i64(fp0
);
9861 TCGv_i64 fp0
= tcg_temp_new_i64();
9862 TCGv_i64 fp1
= tcg_temp_new_i64();
9864 gen_load_fpr64(ctx
, fp0
, ft
);
9865 gen_load_fpr64(ctx
, fp1
, fs
);
9866 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9867 tcg_temp_free_i64(fp1
);
9868 gen_store_fpr64(ctx
, fp0
, fd
);
9869 tcg_temp_free_i64(fp0
);
9875 TCGv_i64 fp0
= tcg_temp_new_i64();
9876 TCGv_i64 fp1
= tcg_temp_new_i64();
9878 gen_load_fpr64(ctx
, fp0
, ft
);
9879 gen_load_fpr64(ctx
, fp1
, fs
);
9880 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9881 tcg_temp_free_i64(fp1
);
9882 gen_store_fpr64(ctx
, fp0
, fd
);
9883 tcg_temp_free_i64(fp0
);
9889 TCGv_i64 fp0
= tcg_temp_new_i64();
9890 TCGv_i64 fp1
= tcg_temp_new_i64();
9892 gen_load_fpr64(ctx
, fp0
, fs
);
9893 gen_load_fpr64(ctx
, fp1
, ft
);
9894 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9895 tcg_temp_free_i64(fp1
);
9896 gen_store_fpr64(ctx
, fp0
, fd
);
9897 tcg_temp_free_i64(fp0
);
9903 TCGv_i64 fp0
= tcg_temp_new_i64();
9905 gen_load_fpr64(ctx
, fp0
, fs
);
9906 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9907 gen_store_fpr64(ctx
, fp0
, fd
);
9908 tcg_temp_free_i64(fp0
);
9914 TCGv_i64 fp0
= tcg_temp_new_i64();
9916 gen_load_fpr64(ctx
, fp0
, fs
);
9917 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9918 gen_store_fpr64(ctx
, fp0
, fd
);
9919 tcg_temp_free_i64(fp0
);
9925 TCGv_i64 fp0
= tcg_temp_new_i64();
9926 TCGv_i64 fp1
= tcg_temp_new_i64();
9928 gen_load_fpr64(ctx
, fp0
, fs
);
9929 gen_load_fpr64(ctx
, fp1
, ft
);
9930 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9931 tcg_temp_free_i64(fp1
);
9932 gen_store_fpr64(ctx
, fp0
, fd
);
9933 tcg_temp_free_i64(fp0
);
9937 check_cp1_64bitmode(ctx
);
9939 TCGv_i32 fp0
= tcg_temp_new_i32();
9941 gen_load_fpr32h(ctx
, fp0
, fs
);
9942 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9943 gen_store_fpr32(ctx
, fp0
, fd
);
9944 tcg_temp_free_i32(fp0
);
9950 TCGv_i64 fp0
= tcg_temp_new_i64();
9952 gen_load_fpr64(ctx
, fp0
, fs
);
9953 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9954 gen_store_fpr64(ctx
, fp0
, fd
);
9955 tcg_temp_free_i64(fp0
);
9959 check_cp1_64bitmode(ctx
);
9961 TCGv_i32 fp0
= tcg_temp_new_i32();
9963 gen_load_fpr32(ctx
, fp0
, fs
);
9964 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9965 gen_store_fpr32(ctx
, fp0
, fd
);
9966 tcg_temp_free_i32(fp0
);
9972 TCGv_i32 fp0
= tcg_temp_new_i32();
9973 TCGv_i32 fp1
= tcg_temp_new_i32();
9975 gen_load_fpr32(ctx
, fp0
, fs
);
9976 gen_load_fpr32(ctx
, fp1
, ft
);
9977 gen_store_fpr32h(ctx
, fp0
, fd
);
9978 gen_store_fpr32(ctx
, fp1
, fd
);
9979 tcg_temp_free_i32(fp0
);
9980 tcg_temp_free_i32(fp1
);
9986 TCGv_i32 fp0
= tcg_temp_new_i32();
9987 TCGv_i32 fp1
= tcg_temp_new_i32();
9989 gen_load_fpr32(ctx
, fp0
, fs
);
9990 gen_load_fpr32h(ctx
, fp1
, ft
);
9991 gen_store_fpr32(ctx
, fp1
, fd
);
9992 gen_store_fpr32h(ctx
, fp0
, fd
);
9993 tcg_temp_free_i32(fp0
);
9994 tcg_temp_free_i32(fp1
);
10000 TCGv_i32 fp0
= tcg_temp_new_i32();
10001 TCGv_i32 fp1
= tcg_temp_new_i32();
10003 gen_load_fpr32h(ctx
, fp0
, fs
);
10004 gen_load_fpr32(ctx
, fp1
, ft
);
10005 gen_store_fpr32(ctx
, fp1
, fd
);
10006 gen_store_fpr32h(ctx
, fp0
, fd
);
10007 tcg_temp_free_i32(fp0
);
10008 tcg_temp_free_i32(fp1
);
10014 TCGv_i32 fp0
= tcg_temp_new_i32();
10015 TCGv_i32 fp1
= tcg_temp_new_i32();
10017 gen_load_fpr32h(ctx
, fp0
, fs
);
10018 gen_load_fpr32h(ctx
, fp1
, ft
);
10019 gen_store_fpr32(ctx
, fp1
, fd
);
10020 gen_store_fpr32h(ctx
, fp0
, fd
);
10021 tcg_temp_free_i32(fp0
);
10022 tcg_temp_free_i32(fp1
);
10026 case OPC_CMP_UN_PS
:
10027 case OPC_CMP_EQ_PS
:
10028 case OPC_CMP_UEQ_PS
:
10029 case OPC_CMP_OLT_PS
:
10030 case OPC_CMP_ULT_PS
:
10031 case OPC_CMP_OLE_PS
:
10032 case OPC_CMP_ULE_PS
:
10033 case OPC_CMP_SF_PS
:
10034 case OPC_CMP_NGLE_PS
:
10035 case OPC_CMP_SEQ_PS
:
10036 case OPC_CMP_NGL_PS
:
10037 case OPC_CMP_LT_PS
:
10038 case OPC_CMP_NGE_PS
:
10039 case OPC_CMP_LE_PS
:
10040 case OPC_CMP_NGT_PS
:
10041 if (ctx
->opcode
& (1 << 6)) {
10042 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10044 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10048 MIPS_INVAL("farith");
10049 generate_exception_end(ctx
, EXCP_RI
);
10054 /* Coprocessor 3 (FPU) */
10055 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10056 int fd
, int fs
, int base
, int index
)
10058 TCGv t0
= tcg_temp_new();
10061 gen_load_gpr(t0
, index
);
10062 } else if (index
== 0) {
10063 gen_load_gpr(t0
, base
);
10065 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
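    /*
     * Note (added summary; the opcode names are assumed from the usual
     * COP1X encodings and are not spelled out in this extract): the
     * indexed CP1 loads and stores handled here form the effective
     * address as base + index, with the base == 0 and index == 0 cases
     * avoiding the add, and the unaligned (LUXC1/SUXC1 style) accesses
     * force 8-byte alignment by masking the low address bits.
     */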
10067 /* Don't do NOP if destination is zero: we must perform the actual
10073 TCGv_i32 fp0
= tcg_temp_new_i32();
10075 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10076 tcg_gen_trunc_tl_i32(fp0
, t0
);
10077 gen_store_fpr32(ctx
, fp0
, fd
);
10078 tcg_temp_free_i32(fp0
);
10083 check_cp1_registers(ctx
, fd
);
10085 TCGv_i64 fp0
= tcg_temp_new_i64();
10086 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10087 gen_store_fpr64(ctx
, fp0
, fd
);
10088 tcg_temp_free_i64(fp0
);
10092 check_cp1_64bitmode(ctx
);
10093 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10095 TCGv_i64 fp0
= tcg_temp_new_i64();
10097 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10098 gen_store_fpr64(ctx
, fp0
, fd
);
10099 tcg_temp_free_i64(fp0
);
10105 TCGv_i32 fp0
= tcg_temp_new_i32();
10106 gen_load_fpr32(ctx
, fp0
, fs
);
10107 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10108 tcg_temp_free_i32(fp0
);
10113 check_cp1_registers(ctx
, fs
);
10115 TCGv_i64 fp0
= tcg_temp_new_i64();
10116 gen_load_fpr64(ctx
, fp0
, fs
);
10117 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10118 tcg_temp_free_i64(fp0
);
10122 check_cp1_64bitmode(ctx
);
10123 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10125 TCGv_i64 fp0
= tcg_temp_new_i64();
10126 gen_load_fpr64(ctx
, fp0
, fs
);
10127 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10128 tcg_temp_free_i64(fp0
);
10135 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10136 int fd
, int fr
, int fs
, int ft
)
10142 TCGv t0
= tcg_temp_local_new();
10143 TCGv_i32 fp
= tcg_temp_new_i32();
10144 TCGv_i32 fph
= tcg_temp_new_i32();
10145 TCGLabel
*l1
= gen_new_label();
10146 TCGLabel
*l2
= gen_new_label();
10148 gen_load_gpr(t0
, fr
);
10149 tcg_gen_andi_tl(t0
, t0
, 0x7);
10151 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10152 gen_load_fpr32(ctx
, fp
, fs
);
10153 gen_load_fpr32h(ctx
, fph
, fs
);
10154 gen_store_fpr32(ctx
, fp
, fd
);
10155 gen_store_fpr32h(ctx
, fph
, fd
);
10158 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10160 #ifdef TARGET_WORDS_BIGENDIAN
10161 gen_load_fpr32(ctx
, fp
, fs
);
10162 gen_load_fpr32h(ctx
, fph
, ft
);
10163 gen_store_fpr32h(ctx
, fp
, fd
);
10164 gen_store_fpr32(ctx
, fph
, fd
);
10166 gen_load_fpr32h(ctx
, fph
, fs
);
10167 gen_load_fpr32(ctx
, fp
, ft
);
10168 gen_store_fpr32(ctx
, fph
, fd
);
10169 gen_store_fpr32h(ctx
, fp
, fd
);
10172 tcg_temp_free_i32(fp
);
10173 tcg_temp_free_i32(fph
);
    case OPC_MADD_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_MADD_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MADD_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MSUB_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_MSUB_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MSUB_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMADD_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMADD_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMADD_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMSUB_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    default:
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
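
/*
 * Illustrative sketch (not used by the translator): the twelve cases above
 * differ only in operand width and in the sign pattern applied to (fs * ft)
 * and fr.  The gen_helper_float_madd_s() family implements the architectural
 * rounding and exception behaviour; this standalone function merely shows the
 * operand roles for the single-precision forms, assuming ordinary C float
 * arithmetic.  The function name is hypothetical.
 */
static inline float flt3_madd_family_example(float fs, float ft, float fr,
                                             int negate, int subtract)
{
    /* MADD: fs * ft + fr; MSUB: fs * ft - fr; NMADD/NMSUB negate the result */
    float res = subtract ? (fs * ft - fr) : (fs * ft + fr);
    return negate ? -res : res;
}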
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
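
/*
 * Guest-side usage sketch (not part of the translator): the comment above
 * notes that Linux emulates RDHWR when the hardware lacks it, which is why
 * user-mode TLS code can read HWR 29 (UserLocal) unconditionally.  A minimal
 * example, assuming a MIPS GCC toolchain (older assemblers may additionally
 * need ".set mips32r2"); the function name is hypothetical and the block is
 * guest code, so it is kept out of the host build.
 */
#if 0
static inline unsigned long read_userlocal_example(void)
{
    unsigned long ulr;
    /* rdhwr rt, $29 reads the UserLocal hardware register */
    __asm__ volatile ("rdhwr %0, $29" : "=r" (ulr));
    return ulr;
}
#endif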
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags as hflags may be changed
           at execution time by the instruction in the delay / forbidden
           slot.  */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branch completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch-likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
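
/*
 * Standalone sketch (illustrative only) of the ISA-mode convention handled in
 * the MIPS_HFLAG_BR case above: on MIPS16/microMIPS capable cores, bit 0 of a
 * jump-register target selects the compressed ISA and is stripped from the
 * architectural PC.  The function and parameter names are hypothetical.
 */
static inline void split_isa_mode_example(unsigned long target,
                                          unsigned long *pc, int *m16_mode)
{
    *m16_mode = target & 0x1;       /* bit 0 -> compressed-ISA mode flag */
    *pc = target & ~0x1UL;          /* remaining bits -> branch target PC */
}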
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands and calculate btarget */
    switch (opc) {
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BC:
    case OPC_BALC:
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BEQZC:
    case OPC_BNEZC:
        if (rs != 0) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
            ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        } else {
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        }
        break;
    default:
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                /* OPC_BLEZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BGEZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                /* OPC_BGEUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            }
            break;
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                /* OPC_BGTZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BLTZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                /* OPC_BLTUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            }
            break;
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                /* OPC_BLEZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BGEZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                /* OPC_BGEC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            }
            break;
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                /* OPC_BGTZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BLTZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                /* OPC_BLTC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            }
            break;
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            if (rs >= rt) {
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    /* OPC_BOVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                } else {
                    /* OPC_BNVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                }
                tcg_temp_free(input_overflow);
                tcg_temp_free(t4);
                tcg_temp_free(t3);
                tcg_temp_free(t2);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    /* OPC_BEQZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                } else {
                    /* OPC_BNEZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                }
            } else {
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    /* OPC_BEQC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                } else {
                    /* OPC_BNEC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                }
            }
            break;
        case OPC_BEQZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            break;
        case OPC_BNEZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            break;
        default:
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        gen_set_label(fs);

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
    }

out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
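
/*
 * Plain-C sketch (illustrative only) of the condition generated for BOVC and
 * BNVC above: the branch tests whether either input fails to be a valid
 * sign-extended 32-bit value ("input overflow"), or whether the 32-bit signed
 * addition of the operands overflows.  The translator performs this with TCG
 * ops; the function name here is hypothetical.
 */
static inline int bovc_condition_example(int64_t rs_val, int64_t rt_val)
{
    int input_overflow = (rs_val != (int32_t)rs_val) ||
                         (rt_val != (int32_t)rt_val);
    int32_t a = (int32_t)rs_val;
    int32_t b = (int32_t)rt_val;
    int32_t sum = (int32_t)((uint32_t)a + (uint32_t)b);
    /* signed overflow iff the operands agree in sign and the sum does not */
    int add_overflow = ((a ^ sum) & (b ^ sum)) < 0;

    return input_overflow || add_overflow;
}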
10773 /* ISA extensions (ASEs) */
10774 /* MIPS16 extension to MIPS32 */
10776 /* MIPS16 major opcodes */
10778 M16_OPC_ADDIUSP
= 0x00,
10779 M16_OPC_ADDIUPC
= 0x01,
10781 M16_OPC_JAL
= 0x03,
10782 M16_OPC_BEQZ
= 0x04,
10783 M16_OPC_BNEQZ
= 0x05,
10784 M16_OPC_SHIFT
= 0x06,
10786 M16_OPC_RRIA
= 0x08,
10787 M16_OPC_ADDIU8
= 0x09,
10788 M16_OPC_SLTI
= 0x0a,
10789 M16_OPC_SLTIU
= 0x0b,
10792 M16_OPC_CMPI
= 0x0e,
10796 M16_OPC_LWSP
= 0x12,
10798 M16_OPC_LBU
= 0x14,
10799 M16_OPC_LHU
= 0x15,
10800 M16_OPC_LWPC
= 0x16,
10801 M16_OPC_LWU
= 0x17,
10804 M16_OPC_SWSP
= 0x1a,
10806 M16_OPC_RRR
= 0x1c,
10808 M16_OPC_EXTEND
= 0x1e,
10812 /* I8 funct field */
10831 /* RR funct field */
10865 /* I64 funct field */
10873 I64_DADDIUPC
= 0x6,
10877 /* RR ry field for CNVT */
10879 RR_RY_CNVT_ZEB
= 0x0,
10880 RR_RY_CNVT_ZEH
= 0x1,
10881 RR_RY_CNVT_ZEW
= 0x2,
10882 RR_RY_CNVT_SEB
= 0x4,
10883 RR_RY_CNVT_SEH
= 0x5,
10884 RR_RY_CNVT_SEW
= 0x6,
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
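
/*
 * Usage sketch (illustrative only): MIPS16 register fields are 3 bits wide
 * and can only name $16, $17 and $2..$7, so xlat() widens a field to the real
 * GPR number (e.g. field 0 -> $16/s0, field 2 -> $2/v0).  The helper below
 * just restates the same table; its name is hypothetical.
 */
static inline int mips16_field_to_gpr_example(int field3)
{
    static const int m[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
    return m[field3 & 0x7];
}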
10894 static void gen_mips16_save (DisasContext
*ctx
,
10895 int xsregs
, int aregs
,
10896 int do_ra
, int do_s0
, int do_s1
,
10899 TCGv t0
= tcg_temp_new();
10900 TCGv t1
= tcg_temp_new();
10901 TCGv t2
= tcg_temp_new();
10931 generate_exception_end(ctx
, EXCP_RI
);
10937 gen_base_offset_addr(ctx
, t0
, 29, 12);
10938 gen_load_gpr(t1
, 7);
10939 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10942 gen_base_offset_addr(ctx
, t0
, 29, 8);
10943 gen_load_gpr(t1
, 6);
10944 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10947 gen_base_offset_addr(ctx
, t0
, 29, 4);
10948 gen_load_gpr(t1
, 5);
10949 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10952 gen_base_offset_addr(ctx
, t0
, 29, 0);
10953 gen_load_gpr(t1
, 4);
10954 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10957 gen_load_gpr(t0
, 29);
10959 #define DECR_AND_STORE(reg) do { \
10960 tcg_gen_movi_tl(t2, -4); \
10961 gen_op_addr_add(ctx, t0, t0, t2); \
10962 gen_load_gpr(t1, reg); \
10963 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10967 DECR_AND_STORE(31);
10972 DECR_AND_STORE(30);
10975 DECR_AND_STORE(23);
10978 DECR_AND_STORE(22);
10981 DECR_AND_STORE(21);
10984 DECR_AND_STORE(20);
10987 DECR_AND_STORE(19);
10990 DECR_AND_STORE(18);
10994 DECR_AND_STORE(17);
10997 DECR_AND_STORE(16);
11027 generate_exception_end(ctx
, EXCP_RI
);
11043 #undef DECR_AND_STORE
11045 tcg_gen_movi_tl(t2
, -framesize
);
11046 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11052 static void gen_mips16_restore (DisasContext
*ctx
,
11053 int xsregs
, int aregs
,
11054 int do_ra
, int do_s0
, int do_s1
,
11058 TCGv t0
= tcg_temp_new();
11059 TCGv t1
= tcg_temp_new();
11060 TCGv t2
= tcg_temp_new();
11062 tcg_gen_movi_tl(t2
, framesize
);
11063 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11065 #define DECR_AND_LOAD(reg) do { \
11066 tcg_gen_movi_tl(t2, -4); \
11067 gen_op_addr_add(ctx, t0, t0, t2); \
11068 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11069 gen_store_gpr(t1, reg); \
11133 generate_exception_end(ctx
, EXCP_RI
);
11149 #undef DECR_AND_LOAD
11151 tcg_gen_movi_tl(t2
, framesize
);
11152 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11158 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11159 int is_64_bit
, int extended
)
11163 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11164 generate_exception_end(ctx
, EXCP_RI
);
11168 t0
= tcg_temp_new();
11170 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11171 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11173 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11179 #if defined(TARGET_MIPS64)
11180 static void decode_i64_mips16 (DisasContext
*ctx
,
11181 int ry
, int funct
, int16_t offset
,
11186 check_insn(ctx
, ISA_MIPS3
);
11187 check_mips_64(ctx
);
11188 offset
= extended
? offset
: offset
<< 3;
11189 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11192 check_insn(ctx
, ISA_MIPS3
);
11193 check_mips_64(ctx
);
11194 offset
= extended
? offset
: offset
<< 3;
11195 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11198 check_insn(ctx
, ISA_MIPS3
);
11199 check_mips_64(ctx
);
11200 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11201 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11204 check_insn(ctx
, ISA_MIPS3
);
11205 check_mips_64(ctx
);
11206 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11207 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11210 check_insn(ctx
, ISA_MIPS3
);
11211 check_mips_64(ctx
);
11212 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11213 generate_exception_end(ctx
, EXCP_RI
);
11215 offset
= extended
? offset
: offset
<< 3;
11216 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11220 check_insn(ctx
, ISA_MIPS3
);
11221 check_mips_64(ctx
);
11222 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11223 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11226 check_insn(ctx
, ISA_MIPS3
);
11227 check_mips_64(ctx
);
11228 offset
= extended
? offset
: offset
<< 2;
11229 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11232 check_insn(ctx
, ISA_MIPS3
);
11233 check_mips_64(ctx
);
11234 offset
= extended
? offset
: offset
<< 2;
11235 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11241 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11243 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11244 int op
, rx
, ry
, funct
, sa
;
11245 int16_t imm
, offset
;
11247 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11248 op
= (ctx
->opcode
>> 11) & 0x1f;
11249 sa
= (ctx
->opcode
>> 22) & 0x1f;
11250 funct
= (ctx
->opcode
>> 8) & 0x7;
11251 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11252 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11253 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11254 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11255 | (ctx
->opcode
& 0x1f));
11257 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11260 case M16_OPC_ADDIUSP
:
11261 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11263 case M16_OPC_ADDIUPC
:
11264 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11267 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11268 /* No delay slot, so just process as a normal instruction */
11271 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11272 /* No delay slot, so just process as a normal instruction */
11274 case M16_OPC_BNEQZ
:
11275 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11276 /* No delay slot, so just process as a normal instruction */
11278 case M16_OPC_SHIFT
:
11279 switch (ctx
->opcode
& 0x3) {
11281 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11284 #if defined(TARGET_MIPS64)
11285 check_mips_64(ctx
);
11286 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11288 generate_exception_end(ctx
, EXCP_RI
);
11292 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11295 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11299 #if defined(TARGET_MIPS64)
11301 check_insn(ctx
, ISA_MIPS3
);
11302 check_mips_64(ctx
);
11303 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11307 imm
= ctx
->opcode
& 0xf;
11308 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11309 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11310 imm
= (int16_t) (imm
<< 1) >> 1;
11311 if ((ctx
->opcode
>> 4) & 0x1) {
11312 #if defined(TARGET_MIPS64)
11313 check_mips_64(ctx
);
11314 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11316 generate_exception_end(ctx
, EXCP_RI
);
11319 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11322 case M16_OPC_ADDIU8
:
11323 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11326 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11328 case M16_OPC_SLTIU
:
11329 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11334 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11337 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11340 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11343 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11346 check_insn(ctx
, ISA_MIPS32
);
11348 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11349 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11350 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11351 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11352 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11353 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11354 | (ctx
->opcode
& 0xf)) << 3;
11356 if (ctx
->opcode
& (1 << 7)) {
11357 gen_mips16_save(ctx
, xsregs
, aregs
,
11358 do_ra
, do_s0
, do_s1
,
11361 gen_mips16_restore(ctx
, xsregs
, aregs
,
11362 do_ra
, do_s0
, do_s1
,
11368 generate_exception_end(ctx
, EXCP_RI
);
11373 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11376 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11378 #if defined(TARGET_MIPS64)
11380 check_insn(ctx
, ISA_MIPS3
);
11381 check_mips_64(ctx
);
11382 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11386 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11389 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11392 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11395 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11398 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11401 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11404 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11406 #if defined(TARGET_MIPS64)
11408 check_insn(ctx
, ISA_MIPS3
);
11409 check_mips_64(ctx
);
11410 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11414 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11417 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11420 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11423 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11425 #if defined(TARGET_MIPS64)
11427 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11431 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11447 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11451 int op
, cnvt_op
, op1
, offset
;
11455 op
= (ctx
->opcode
>> 11) & 0x1f;
11456 sa
= (ctx
->opcode
>> 2) & 0x7;
11457 sa
= sa
== 0 ? 8 : sa
;
11458 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11459 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11460 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11461 op1
= offset
= ctx
->opcode
& 0x1f;
11466 case M16_OPC_ADDIUSP
:
11468 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11470 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11473 case M16_OPC_ADDIUPC
:
11474 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11477 offset
= (ctx
->opcode
& 0x7ff) << 1;
11478 offset
= (int16_t)(offset
<< 4) >> 4;
11479 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11480 /* No delay slot, so just process as a normal instruction */
11483 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11484 offset
= (((ctx
->opcode
& 0x1f) << 21)
11485 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11487 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11488 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11492 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11493 ((int8_t)ctx
->opcode
) << 1, 0);
11494 /* No delay slot, so just process as a normal instruction */
11496 case M16_OPC_BNEQZ
:
11497 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11498 ((int8_t)ctx
->opcode
) << 1, 0);
11499 /* No delay slot, so just process as a normal instruction */
11501 case M16_OPC_SHIFT
:
11502 switch (ctx
->opcode
& 0x3) {
11504 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11507 #if defined(TARGET_MIPS64)
11508 check_insn(ctx
, ISA_MIPS3
);
11509 check_mips_64(ctx
);
11510 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11512 generate_exception_end(ctx
, EXCP_RI
);
11516 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11519 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11523 #if defined(TARGET_MIPS64)
11525 check_insn(ctx
, ISA_MIPS3
);
11526 check_mips_64(ctx
);
11527 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11532 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11534 if ((ctx
->opcode
>> 4) & 1) {
11535 #if defined(TARGET_MIPS64)
11536 check_insn(ctx
, ISA_MIPS3
);
11537 check_mips_64(ctx
);
11538 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11540 generate_exception_end(ctx
, EXCP_RI
);
11543 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11547 case M16_OPC_ADDIU8
:
11549 int16_t imm
= (int8_t) ctx
->opcode
;
11551 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11556 int16_t imm
= (uint8_t) ctx
->opcode
;
11557 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11560 case M16_OPC_SLTIU
:
11562 int16_t imm
= (uint8_t) ctx
->opcode
;
11563 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11570 funct
= (ctx
->opcode
>> 8) & 0x7;
11573 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11574 ((int8_t)ctx
->opcode
) << 1, 0);
11577 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11578 ((int8_t)ctx
->opcode
) << 1, 0);
11581 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11584 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11585 ((int8_t)ctx
->opcode
) << 3);
11588 check_insn(ctx
, ISA_MIPS32
);
11590 int do_ra
= ctx
->opcode
& (1 << 6);
11591 int do_s0
= ctx
->opcode
& (1 << 5);
11592 int do_s1
= ctx
->opcode
& (1 << 4);
11593 int framesize
= ctx
->opcode
& 0xf;
11595 if (framesize
== 0) {
11598 framesize
= framesize
<< 3;
11601 if (ctx
->opcode
& (1 << 7)) {
11602 gen_mips16_save(ctx
, 0, 0,
11603 do_ra
, do_s0
, do_s1
, framesize
);
11605 gen_mips16_restore(ctx
, 0, 0,
11606 do_ra
, do_s0
, do_s1
, framesize
);
11612 int rz
= xlat(ctx
->opcode
& 0x7);
11614 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11615 ((ctx
->opcode
>> 5) & 0x7);
11616 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11620 reg32
= ctx
->opcode
& 0x1f;
11621 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11624 generate_exception_end(ctx
, EXCP_RI
);
11631 int16_t imm
= (uint8_t) ctx
->opcode
;
11633 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11638 int16_t imm
= (uint8_t) ctx
->opcode
;
11639 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11642 #if defined(TARGET_MIPS64)
11644 check_insn(ctx
, ISA_MIPS3
);
11645 check_mips_64(ctx
);
11646 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11650 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11653 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11656 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11659 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11662 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11665 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11668 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11670 #if defined (TARGET_MIPS64)
11672 check_insn(ctx
, ISA_MIPS3
);
11673 check_mips_64(ctx
);
11674 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11678 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11681 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11684 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11687 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11691 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11694 switch (ctx
->opcode
& 0x3) {
11696 mips32_op
= OPC_ADDU
;
11699 mips32_op
= OPC_SUBU
;
11701 #if defined(TARGET_MIPS64)
11703 mips32_op
= OPC_DADDU
;
11704 check_insn(ctx
, ISA_MIPS3
);
11705 check_mips_64(ctx
);
11708 mips32_op
= OPC_DSUBU
;
11709 check_insn(ctx
, ISA_MIPS3
);
11710 check_mips_64(ctx
);
11714 generate_exception_end(ctx
, EXCP_RI
);
11718 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11727 int nd
= (ctx
->opcode
>> 7) & 0x1;
11728 int link
= (ctx
->opcode
>> 6) & 0x1;
11729 int ra
= (ctx
->opcode
>> 5) & 0x1;
11732 check_insn(ctx
, ISA_MIPS32
);
11741 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11746 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11747 gen_helper_do_semihosting(cpu_env
);
11749 /* XXX: not clear which exception should be raised
11750 * when in debug mode...
11752 check_insn(ctx
, ISA_MIPS32
);
11753 generate_exception_end(ctx
, EXCP_DBp
);
11757 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11760 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11763 generate_exception_end(ctx
, EXCP_BREAK
);
11766 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11769 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11772 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11774 #if defined (TARGET_MIPS64)
11776 check_insn(ctx
, ISA_MIPS3
);
11777 check_mips_64(ctx
);
11778 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11782 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11785 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11788 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11791 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11794 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11797 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11800 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11803 check_insn(ctx
, ISA_MIPS32
);
11805 case RR_RY_CNVT_ZEB
:
11806 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11808 case RR_RY_CNVT_ZEH
:
11809 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11811 case RR_RY_CNVT_SEB
:
11812 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11814 case RR_RY_CNVT_SEH
:
11815 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11817 #if defined (TARGET_MIPS64)
11818 case RR_RY_CNVT_ZEW
:
11819 check_insn(ctx
, ISA_MIPS64
);
11820 check_mips_64(ctx
);
11821 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11823 case RR_RY_CNVT_SEW
:
11824 check_insn(ctx
, ISA_MIPS64
);
11825 check_mips_64(ctx
);
11826 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11830 generate_exception_end(ctx
, EXCP_RI
);
11835 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11837 #if defined (TARGET_MIPS64)
11839 check_insn(ctx
, ISA_MIPS3
);
11840 check_mips_64(ctx
);
11841 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11844 check_insn(ctx
, ISA_MIPS3
);
11845 check_mips_64(ctx
);
11846 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11849 check_insn(ctx
, ISA_MIPS3
);
11850 check_mips_64(ctx
);
11851 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11854 check_insn(ctx
, ISA_MIPS3
);
11855 check_mips_64(ctx
);
11856 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11860 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11863 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11866 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11869 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11871 #if defined (TARGET_MIPS64)
11873 check_insn(ctx
, ISA_MIPS3
);
11874 check_mips_64(ctx
);
11875 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11878 check_insn(ctx
, ISA_MIPS3
);
11879 check_mips_64(ctx
);
11880 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11883 check_insn(ctx
, ISA_MIPS3
);
11884 check_mips_64(ctx
);
11885 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11888 check_insn(ctx
, ISA_MIPS3
);
11889 check_mips_64(ctx
);
11890 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11894 generate_exception_end(ctx
, EXCP_RI
);
11898 case M16_OPC_EXTEND
:
11899 decode_extended_mips16_opc(env
, ctx
);
11902 #if defined(TARGET_MIPS64)
11904 funct
= (ctx
->opcode
>> 8) & 0x7;
11905 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11909 generate_exception_end(ctx
, EXCP_RI
);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *    Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */
11957 POOL32S
= 0x16, /* MIPS64 */
11958 DADDIU32
= 0x17, /* MIPS64 */
11987 /* 0x29 is reserved */
12000 /* 0x31 is reserved */
12013 SD32
= 0x36, /* MIPS64 */
12014 LD32
= 0x37, /* MIPS64 */
12016 /* 0x39 is reserved */
12032 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12042 /* POOL32A encoding of minor opcode field */
12045 /* These opcodes are distinguished only by bits 9..6; those bits are
12046 * what are recorded below. */
12083 /* The following can be distinguished by their lower 6 bits. */
12093 /* POOL32AXF encoding of minor opcode field extension */
12096 * 1. MIPS Architecture for Programmers Volume II-B:
12097 * The microMIPS32 Instruction Set (Revision 3.05)
12099 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12101 * 2. MIPS Architecture for Programmers VolumeIV-e:
12102 * The MIPS DSP Application-Specific Extension
12103 * to the microMIPS32 Architecture (Revision 2.34)
12105 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12120 /* begin of microMIPS32 DSP */
12122 /* bits 13..12 for 0x01 */
12128 /* bits 13..12 for 0x2a */
12134 /* bits 13..12 for 0x32 */
12138 /* end of microMIPS32 DSP */
12140 /* bits 15..12 for 0x2c */
12157 /* bits 15..12 for 0x34 */
12165 /* bits 15..12 for 0x3c */
12167 JR
= 0x0, /* alias */
12175 /* bits 15..12 for 0x05 */
12179 /* bits 15..12 for 0x0d */
12191 /* bits 15..12 for 0x15 */
12197 /* bits 15..12 for 0x1d */
12201 /* bits 15..12 for 0x2d */
12206 /* bits 15..12 for 0x35 */
12213 /* POOL32B encoding of minor opcode field (bits 15..12) */
12229 /* POOL32C encoding of minor opcode field (bits 15..12) */
12237 /* 0xa is reserved */
12244 /* 0x6 is reserved */
12250 /* POOL32F encoding of minor opcode field (bits 5..0) */
12253 /* These are the bit 7..6 values */
12262 /* These are the bit 8..6 values */
12287 MOVZ_FMT_05
= 0x05,
12321 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12328 /* POOL32Fxf encoding of minor opcode extension field */
12366 /* POOL32I encoding of minor opcode field (bits 25..21) */
12396 /* These overlap and are distinguished by bit16 of the instruction */
12405 /* POOL16A encoding of minor opcode field */
12412 /* POOL16B encoding of minor opcode field */
12419 /* POOL16C encoding of minor opcode field */
12439 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12459 /* POOL16D encoding of minor opcode field */
12466 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) << (32-width))   \
               >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
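
/*
 * Worked example (illustrative only): SIMM() isolates `width` bits starting
 * at bit `start` and sign-extends them by shifting up to bit 31 and back down
 * arithmetically, while ZIMM() only masks.  The helper below mirrors SIMM()
 * in plain C for a 4-bit field; the function name is hypothetical.
 */
static inline int32_t simm_example(uint32_t op)
{
    /* e.g. ADDIUS5 encodes a 4-bit signed immediate at bits 4..1:
     * a raw field of 0xF decodes to -1, a raw field of 0x7 to +7. */
    return SIMM(op, 1, 4);
}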
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
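
/*
 * Illustrative sketch: ANDI16 (and similarly ADDIUR2) does not carry a
 * literal immediate; its 4-bit encoded field selects one of a handful of
 * common mask values from the decoded_imm[] table above.  This hypothetical
 * helper restates the same mapping outside the translator.
 */
static inline int andi16_imm_example(int encoded4)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    return decoded_imm[encoded4 & 0xf];
}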
12556 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12557 int base
, int16_t offset
)
12562 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12563 generate_exception_end(ctx
, EXCP_RI
);
12567 t0
= tcg_temp_new();
12569 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12571 t1
= tcg_const_tl(reglist
);
12572 t2
= tcg_const_i32(ctx
->mem_idx
);
12574 save_cpu_state(ctx
, 1);
12577 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12580 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12582 #ifdef TARGET_MIPS64
12584 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12587 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12593 tcg_temp_free_i32(t2
);
12597 static void gen_pool16c_insn(DisasContext
*ctx
)
12599 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12600 int rs
= mmreg(ctx
->opcode
& 0x7);
12602 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12607 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12613 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12619 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12625 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12632 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12633 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12635 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12644 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12645 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12647 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12654 int reg
= ctx
->opcode
& 0x1f;
12656 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12662 int reg
= ctx
->opcode
& 0x1f;
12663 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12664 /* Let normal delay slot handling in our caller take us
12665 to the branch target. */
12670 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12671 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12675 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12676 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12680 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12684 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12687 generate_exception_end(ctx
, EXCP_BREAK
);
12690 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12691 gen_helper_do_semihosting(cpu_env
);
12693 /* XXX: not clear which exception should be raised
12694 * when in debug mode...
12696 check_insn(ctx
, ISA_MIPS32
);
12697 generate_exception_end(ctx
, EXCP_DBp
);
12700 case JRADDIUSP
+ 0:
12701 case JRADDIUSP
+ 1:
12703 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12704 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12705 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12706 /* Let normal delay slot handling in our caller take us
12707 to the branch target. */
12711 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
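
/*
 * Decoding sketch (illustrative only): MOVEP moves two registers in a single
 * 16-bit instruction.  The destination field selects a fixed (rd, re) pair
 * from rd_enc[]/re_enc[] above, and a source encoding of 0 stands for $zero,
 * which is why gen_movep() falls back to tcg_gen_movi_tl(..., 0).  The names
 * below are hypothetical.
 */
static inline void movep_decode_example(int enc_dest, int enc_rt, int enc_rs,
                                        int *rd, int *re, int *rs, int *rt)
{
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    *rd = rd_enc[enc_dest & 0x7];
    *re = re_enc[enc_dest & 0x7];
    *rs = rs_rt_enc[enc_rs & 0x7];   /* 0 means the $zero register */
    *rt = rs_rt_enc[enc_rt & 0x7];
}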
12739 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12741 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12742 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12744 switch (ctx
->opcode
& 0xf) {
12746 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12749 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12753 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12754 int offset
= extract32(ctx
->opcode
, 4, 4);
12755 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12758 case R6_JRC16
: /* JRCADDIUSP */
12759 if ((ctx
->opcode
>> 4) & 1) {
12761 int imm
= extract32(ctx
->opcode
, 5, 5);
12762 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12763 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12766 int rs
= extract32(ctx
->opcode
, 5, 5);
12767 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12770 case MOVEP
... MOVEP_07
:
12771 case MOVEP_0C
... MOVEP_0F
:
12773 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12774 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12775 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12776 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12780 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12783 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12787 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12788 int offset
= extract32(ctx
->opcode
, 4, 4);
12789 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12792 case JALRC16
: /* BREAK16, SDBBP16 */
12793 switch (ctx
->opcode
& 0x3f) {
12795 case JALRC16
+ 0x20:
12797 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12802 generate_exception(ctx
, EXCP_BREAK
);
12806 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12807 gen_helper_do_semihosting(cpu_env
);
12809 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12810 generate_exception(ctx
, EXCP_RI
);
12812 generate_exception(ctx
, EXCP_DBp
);
12819 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);
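
/*
 * Address-calculation sketch (illustrative only): LWXS, handled by gen_ldxs()
 * above, loads a sign-extended word from base + (index * 4), i.e. the index
 * register is scaled by the word size.  The helper name is hypothetical.
 */
static inline uint64_t lwxs_ea_example(uint64_t base, uint64_t index)
{
    return base + (index << 2);     /* scaled index, word-sized elements */
}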
12844 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12845 int base
, int16_t offset
)
12849 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12850 generate_exception_end(ctx
, EXCP_RI
);
12854 t0
= tcg_temp_new();
12855 t1
= tcg_temp_new();
12857 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12862 generate_exception_end(ctx
, EXCP_RI
);
12865 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12866 gen_store_gpr(t1
, rd
);
12867 tcg_gen_movi_tl(t1
, 4);
12868 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12869 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12870 gen_store_gpr(t1
, rd
+1);
12873 gen_load_gpr(t1
, rd
);
12874 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12875 tcg_gen_movi_tl(t1
, 4);
12876 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12877 gen_load_gpr(t1
, rd
+1);
12878 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12880 #ifdef TARGET_MIPS64
12883 generate_exception_end(ctx
, EXCP_RI
);
12886 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12887 gen_store_gpr(t1
, rd
);
12888 tcg_gen_movi_tl(t1
, 8);
12889 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12890 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12891 gen_store_gpr(t1
, rd
+1);
12894 gen_load_gpr(t1
, rd
);
12895 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12896 tcg_gen_movi_tl(t1
, 8);
12897 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12898 gen_load_gpr(t1
, rd
+1);
12899 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12907 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12909 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12910 int minor
= (ctx
->opcode
>> 12) & 0xf;
12911 uint32_t mips32_op
;
12913 switch (extension
) {
12915 mips32_op
= OPC_TEQ
;
12918 mips32_op
= OPC_TGE
;
12921 mips32_op
= OPC_TGEU
;
12924 mips32_op
= OPC_TLT
;
12927 mips32_op
= OPC_TLTU
;
12930 mips32_op
= OPC_TNE
;
12932 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12934 #ifndef CONFIG_USER_ONLY
12937 check_cp0_enabled(ctx
);
12939 /* Treat as NOP. */
12942 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12946 check_cp0_enabled(ctx
);
12948 TCGv t0
= tcg_temp_new();
12950 gen_load_gpr(t0
, rt
);
12951 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12957 switch (minor
& 3) {
12959 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12962 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12965 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12968 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12971 goto pool32axf_invalid
;
12975 switch (minor
& 3) {
12977 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12980 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12983 goto pool32axf_invalid
;
12989 check_insn(ctx
, ISA_MIPS32R6
);
12990 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12993 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12996 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12999 mips32_op
= OPC_CLO
;
13002 mips32_op
= OPC_CLZ
;
13004 check_insn(ctx
, ISA_MIPS32
);
13005 gen_cl(ctx
, mips32_op
, rt
, rs
);
13008 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13009 gen_rdhwr(ctx
, rt
, rs
, 0);
13012 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13015 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13016 mips32_op
= OPC_MULT
;
13019 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13020 mips32_op
= OPC_MULTU
;
13023 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13024 mips32_op
= OPC_DIV
;
13027 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13028 mips32_op
= OPC_DIVU
;
13031 check_insn(ctx
, ISA_MIPS32
);
13032 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13036 mips32_op
= OPC_MADD
;
13039 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13040 mips32_op
= OPC_MADDU
;
13043 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13044 mips32_op
= OPC_MSUB
;
13047 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13048 mips32_op
= OPC_MSUBU
;
13050 check_insn(ctx
, ISA_MIPS32
);
13051 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13054 goto pool32axf_invalid
;
13065 generate_exception_err(ctx
, EXCP_CpU
, 2);
13068 goto pool32axf_invalid
;
13073 case JALR
: /* JALRC */
13074 case JALR_HB
: /* JALRC_HB */
13075 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13076 /* JALRC, JALRC_HB */
13077 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13079 /* JALR, JALR_HB */
13080 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13081 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13086 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13087 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13088 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13091 goto pool32axf_invalid
;
13097 check_cp0_enabled(ctx
);
13098 check_insn(ctx
, ISA_MIPS32R2
);
13099 gen_load_srsgpr(rs
, rt
);
13102 check_cp0_enabled(ctx
);
13103 check_insn(ctx
, ISA_MIPS32R2
);
13104 gen_store_srsgpr(rs
, rt
);
13107 goto pool32axf_invalid
;
13110 #ifndef CONFIG_USER_ONLY
13114 mips32_op
= OPC_TLBP
;
13117 mips32_op
= OPC_TLBR
;
13120 mips32_op
= OPC_TLBWI
;
13123 mips32_op
= OPC_TLBWR
;
13126 mips32_op
= OPC_TLBINV
;
13129 mips32_op
= OPC_TLBINVF
;
13132 mips32_op
= OPC_WAIT
;
13135 mips32_op
= OPC_DERET
;
13138 mips32_op
= OPC_ERET
;
13140 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13143 goto pool32axf_invalid
;
13149 check_cp0_enabled(ctx
);
13151 TCGv t0
= tcg_temp_new();
13153 save_cpu_state(ctx
, 1);
13154 gen_helper_di(t0
, cpu_env
);
13155 gen_store_gpr(t0
, rs
);
13156 /* Stop translation as we may have switched the execution mode */
13157 ctx
->bstate
= BS_STOP
;
13162 check_cp0_enabled(ctx
);
13164 TCGv t0
= tcg_temp_new();
13166 save_cpu_state(ctx
, 1);
13167 gen_helper_ei(t0
, cpu_env
);
13168 gen_store_gpr(t0
, rs
);
13169 /* Stop translation as we may have switched the execution mode */
13170 ctx
->bstate
= BS_STOP
;
13175 goto pool32axf_invalid
;
13185 generate_exception_end(ctx
, EXCP_SYSCALL
);
13188 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13189 gen_helper_do_semihosting(cpu_env
);
13191 check_insn(ctx
, ISA_MIPS32
);
13192 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13193 generate_exception_end(ctx
, EXCP_RI
);
13195 generate_exception_end(ctx
, EXCP_DBp
);
13200 goto pool32axf_invalid
;
13204 switch (minor
& 3) {
13206 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13209 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13212 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13215 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13218 goto pool32axf_invalid
;
13222 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13225 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13228 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13231 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13234 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13237 goto pool32axf_invalid
;
13242 MIPS_INVAL("pool32axf");
13243 generate_exception_end(ctx
, EXCP_RI
);
13248 /* Values for microMIPS fmt field. Variable-width, depending on which
13249 formats the instruction supports. */
13268 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13270 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13271 uint32_t mips32_op
;
13273 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13274 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13275 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13277 switch (extension
) {
13278 case FLOAT_1BIT_FMT(CFC1
, 0):
13279 mips32_op
= OPC_CFC1
;
13281 case FLOAT_1BIT_FMT(CTC1
, 0):
13282 mips32_op
= OPC_CTC1
;
13284 case FLOAT_1BIT_FMT(MFC1
, 0):
13285 mips32_op
= OPC_MFC1
;
13287 case FLOAT_1BIT_FMT(MTC1
, 0):
13288 mips32_op
= OPC_MTC1
;
13290 case FLOAT_1BIT_FMT(MFHC1
, 0):
13291 mips32_op
= OPC_MFHC1
;
13293 case FLOAT_1BIT_FMT(MTHC1
, 0):
13294 mips32_op
= OPC_MTHC1
;
13296 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13299 /* Reciprocal square root */
13300 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13301 mips32_op
= OPC_RSQRT_S
;
13303 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13304 mips32_op
= OPC_RSQRT_D
;
13308 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13309 mips32_op
= OPC_SQRT_S
;
13311 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13312 mips32_op
= OPC_SQRT_D
;
13316 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13317 mips32_op
= OPC_RECIP_S
;
13319 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13320 mips32_op
= OPC_RECIP_D
;
13324 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13325 mips32_op
= OPC_FLOOR_L_S
;
13327 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13328 mips32_op
= OPC_FLOOR_L_D
;
13330 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13331 mips32_op
= OPC_FLOOR_W_S
;
13333 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13334 mips32_op
= OPC_FLOOR_W_D
;
13338 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13339 mips32_op
= OPC_CEIL_L_S
;
13341 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13342 mips32_op
= OPC_CEIL_L_D
;
13344 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13345 mips32_op
= OPC_CEIL_W_S
;
13347 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13348 mips32_op
= OPC_CEIL_W_D
;
13352 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13353 mips32_op
= OPC_TRUNC_L_S
;
13355 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13356 mips32_op
= OPC_TRUNC_L_D
;
13358 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13359 mips32_op
= OPC_TRUNC_W_S
;
13361 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13362 mips32_op
= OPC_TRUNC_W_D
;
13366 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13367 mips32_op
= OPC_ROUND_L_S
;
13369 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13370 mips32_op
= OPC_ROUND_L_D
;
13372 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13373 mips32_op
= OPC_ROUND_W_S
;
13375 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13376 mips32_op
= OPC_ROUND_W_D
;
13379 /* Integer to floating-point conversion */
13380 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13381 mips32_op
= OPC_CVT_L_S
;
13383 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13384 mips32_op
= OPC_CVT_L_D
;
13386 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13387 mips32_op
= OPC_CVT_W_S
;
13389 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13390 mips32_op
= OPC_CVT_W_D
;
13393 /* Paired-foo conversions */
13394 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13395 mips32_op
= OPC_CVT_S_PL
;
13397 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13398 mips32_op
= OPC_CVT_S_PU
;
13400 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13401 mips32_op
= OPC_CVT_PW_PS
;
13403 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13404 mips32_op
= OPC_CVT_PS_PW
;
13407 /* Floating-point moves */
13408 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13409 mips32_op
= OPC_MOV_S
;
13411 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13412 mips32_op
= OPC_MOV_D
;
13414 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13415 mips32_op
= OPC_MOV_PS
;
13418 /* Absolute value */
13419 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13420 mips32_op
= OPC_ABS_S
;
13422 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13423 mips32_op
= OPC_ABS_D
;
13425 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13426 mips32_op
= OPC_ABS_PS
;
13430 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13431 mips32_op
= OPC_NEG_S
;
13433 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13434 mips32_op
= OPC_NEG_D
;
13436 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13437 mips32_op
= OPC_NEG_PS
;
13440 /* Reciprocal square root step */
13441 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13442 mips32_op
= OPC_RSQRT1_S
;
13444 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13445 mips32_op
= OPC_RSQRT1_D
;
13447 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13448 mips32_op
= OPC_RSQRT1_PS
;
13451 /* Reciprocal step */
13452 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13453 mips32_op
= OPC_RECIP1_S
;
13455 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13456 mips32_op
= OPC_RECIP1_S
;
13458 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13459 mips32_op
= OPC_RECIP1_PS
;
13462 /* Conversions from double */
13463 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13464 mips32_op
= OPC_CVT_D_S
;
13466 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13467 mips32_op
= OPC_CVT_D_W
;
13469 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13470 mips32_op
= OPC_CVT_D_L
;
13473 /* Conversions from single */
13474 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13475 mips32_op
= OPC_CVT_S_D
;
13477 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13478 mips32_op
= OPC_CVT_S_W
;
13480 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13481 mips32_op
= OPC_CVT_S_L
;
13483 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13486 /* Conditional moves on floating-point codes */
13487 case COND_FLOAT_MOV(MOVT
, 0):
13488 case COND_FLOAT_MOV(MOVT
, 1):
13489 case COND_FLOAT_MOV(MOVT
, 2):
13490 case COND_FLOAT_MOV(MOVT
, 3):
13491 case COND_FLOAT_MOV(MOVT
, 4):
13492 case COND_FLOAT_MOV(MOVT
, 5):
13493 case COND_FLOAT_MOV(MOVT
, 6):
13494 case COND_FLOAT_MOV(MOVT
, 7):
13495 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13496 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13498 case COND_FLOAT_MOV(MOVF
, 0):
13499 case COND_FLOAT_MOV(MOVF
, 1):
13500 case COND_FLOAT_MOV(MOVF
, 2):
13501 case COND_FLOAT_MOV(MOVF
, 3):
13502 case COND_FLOAT_MOV(MOVF
, 4):
13503 case COND_FLOAT_MOV(MOVF
, 5):
13504 case COND_FLOAT_MOV(MOVF
, 6):
13505 case COND_FLOAT_MOV(MOVF
, 7):
13506 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13507 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13510 MIPS_INVAL("pool32fxf");
13511 generate_exception_end(ctx
, EXCP_RI
);
13516 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13520 int rt
, rs
, rd
, rr
;
13522 uint32_t op
, minor
, mips32_op
;
13523 uint32_t cond
, fmt
, cc
;
13525 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13526 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13528 rt
= (ctx
->opcode
>> 21) & 0x1f;
13529 rs
= (ctx
->opcode
>> 16) & 0x1f;
13530 rd
= (ctx
->opcode
>> 11) & 0x1f;
13531 rr
= (ctx
->opcode
>> 6) & 0x1f;
13532 imm
= (int16_t) ctx
->opcode
;
13534 op
= (ctx
->opcode
>> 26) & 0x3f;
13537 minor
= ctx
->opcode
& 0x3f;
13540 minor
= (ctx
->opcode
>> 6) & 0xf;
13543 mips32_op
= OPC_SLL
;
13546 mips32_op
= OPC_SRA
;
13549 mips32_op
= OPC_SRL
;
13552 mips32_op
= OPC_ROTR
;
13554 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13557 check_insn(ctx
, ISA_MIPS32R6
);
13558 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13561 check_insn(ctx
, ISA_MIPS32R6
);
13562 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13565 check_insn(ctx
, ISA_MIPS32R6
);
13566 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13569 goto pool32a_invalid
;
13573 minor
= (ctx
->opcode
>> 6) & 0xf;
13577 mips32_op
= OPC_ADD
;
13580 mips32_op
= OPC_ADDU
;
13583 mips32_op
= OPC_SUB
;
13586 mips32_op
= OPC_SUBU
;
13589 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13590 mips32_op
= OPC_MUL
;
13592 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13596 mips32_op
= OPC_SLLV
;
13599 mips32_op
= OPC_SRLV
;
13602 mips32_op
= OPC_SRAV
;
13605 mips32_op
= OPC_ROTRV
;
13607 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13609 /* Logical operations */
13611 mips32_op
= OPC_AND
;
13614 mips32_op
= OPC_OR
;
13617 mips32_op
= OPC_NOR
;
13620 mips32_op
= OPC_XOR
;
13622 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13624 /* Set less than */
13626 mips32_op
= OPC_SLT
;
13629 mips32_op
= OPC_SLTU
;
13631 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13634 goto pool32a_invalid
;
13638 minor
= (ctx
->opcode
>> 6) & 0xf;
13640 /* Conditional moves */
13641 case MOVN
: /* MUL */
13642 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13644 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13647 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13650 case MOVZ
: /* MUH */
13651 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13653 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13656 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13660 check_insn(ctx
, ISA_MIPS32R6
);
13661 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13664 check_insn(ctx
, ISA_MIPS32R6
);
13665 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13667 case LWXS
: /* DIV */
13668 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13670 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13673 gen_ldxs(ctx
, rs
, rt
, rd
);
13677 check_insn(ctx
, ISA_MIPS32R6
);
13678 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13681 check_insn(ctx
, ISA_MIPS32R6
);
13682 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13685 check_insn(ctx
, ISA_MIPS32R6
);
13686 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13689 goto pool32a_invalid
;
13693 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13696 check_insn(ctx
, ISA_MIPS32R6
);
13697 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13698 extract32(ctx
->opcode
, 9, 2));
13701 check_insn(ctx
, ISA_MIPS32R6
);
13702 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13703 extract32(ctx
->opcode
, 9, 2));
13706 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13709 gen_pool32axf(env
, ctx
, rt
, rs
);
13712 generate_exception_end(ctx
, EXCP_BREAK
);
13715 check_insn(ctx
, ISA_MIPS32R6
);
13716 generate_exception_end(ctx
, EXCP_RI
);
13720 MIPS_INVAL("pool32a");
13721 generate_exception_end(ctx
, EXCP_RI
);
13726 minor
= (ctx
->opcode
>> 12) & 0xf;
13729 check_cp0_enabled(ctx
);
13730 /* Treat as no-op. */
13734 /* COP2: Not implemented. */
13735 generate_exception_err(ctx
, EXCP_CpU
, 2);
13737 #ifdef TARGET_MIPS64
13740 check_insn(ctx
, ISA_MIPS3
);
13741 check_mips_64(ctx
);
13746 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13748 #ifdef TARGET_MIPS64
13751 check_insn(ctx
, ISA_MIPS3
);
13752 check_mips_64(ctx
);
13757 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13760 MIPS_INVAL("pool32b");
13761 generate_exception_end(ctx
, EXCP_RI
);
13766 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13767 minor
= ctx
->opcode
& 0x3f;
13768 check_cp1_enabled(ctx
);
13771 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13772 mips32_op
= OPC_ALNV_PS
;
13775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13776 mips32_op
= OPC_MADD_S
;
13779 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13780 mips32_op
= OPC_MADD_D
;
13783 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13784 mips32_op
= OPC_MADD_PS
;
13787 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13788 mips32_op
= OPC_MSUB_S
;
13791 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13792 mips32_op
= OPC_MSUB_D
;
13795 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13796 mips32_op
= OPC_MSUB_PS
;
13799 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13800 mips32_op
= OPC_NMADD_S
;
13803 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13804 mips32_op
= OPC_NMADD_D
;
13807 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13808 mips32_op
= OPC_NMADD_PS
;
13811 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13812 mips32_op
= OPC_NMSUB_S
;
13815 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13816 mips32_op
= OPC_NMSUB_D
;
13819 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13820 mips32_op
= OPC_NMSUB_PS
;
13822 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13824 case CABS_COND_FMT
:
13825 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13826 cond
= (ctx
->opcode
>> 6) & 0xf;
13827 cc
= (ctx
->opcode
>> 13) & 0x7;
13828 fmt
= (ctx
->opcode
>> 10) & 0x3;
13831 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13834 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13837 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13840 goto pool32f_invalid
;
13844 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13845 cond
= (ctx
->opcode
>> 6) & 0xf;
13846 cc
= (ctx
->opcode
>> 13) & 0x7;
13847 fmt
= (ctx
->opcode
>> 10) & 0x3;
13850 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13853 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13856 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13859 goto pool32f_invalid
;
13863 check_insn(ctx
, ISA_MIPS32R6
);
13864 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13867 check_insn(ctx
, ISA_MIPS32R6
);
13868 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13871 gen_pool32fxf(ctx
, rt
, rs
);
13875 switch ((ctx
->opcode
>> 6) & 0x7) {
13877 mips32_op
= OPC_PLL_PS
;
13880 mips32_op
= OPC_PLU_PS
;
13883 mips32_op
= OPC_PUL_PS
;
13886 mips32_op
= OPC_PUU_PS
;
13889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13890 mips32_op
= OPC_CVT_PS_S
;
13892 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13895 goto pool32f_invalid
;
13899 check_insn(ctx
, ISA_MIPS32R6
);
13900 switch ((ctx
->opcode
>> 9) & 0x3) {
13902 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13905 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13908 goto pool32f_invalid
;
13913 switch ((ctx
->opcode
>> 6) & 0x7) {
13915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13916 mips32_op
= OPC_LWXC1
;
13919 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13920 mips32_op
= OPC_SWXC1
;
13923 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13924 mips32_op
= OPC_LDXC1
;
13927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13928 mips32_op
= OPC_SDXC1
;
13931 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13932 mips32_op
= OPC_LUXC1
;
13935 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13936 mips32_op
= OPC_SUXC1
;
13938 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13941 goto pool32f_invalid
;
13945 check_insn(ctx
, ISA_MIPS32R6
);
13946 switch ((ctx
->opcode
>> 9) & 0x3) {
13948 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13951 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13954 goto pool32f_invalid
;
13959 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13960 fmt
= (ctx
->opcode
>> 9) & 0x3;
13961 switch ((ctx
->opcode
>> 6) & 0x7) {
13965 mips32_op
= OPC_RSQRT2_S
;
13968 mips32_op
= OPC_RSQRT2_D
;
13971 mips32_op
= OPC_RSQRT2_PS
;
13974 goto pool32f_invalid
;
13980 mips32_op
= OPC_RECIP2_S
;
13983 mips32_op
= OPC_RECIP2_D
;
13986 mips32_op
= OPC_RECIP2_PS
;
13989 goto pool32f_invalid
;
13993 mips32_op
= OPC_ADDR_PS
;
13996 mips32_op
= OPC_MULR_PS
;
13998 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14001 goto pool32f_invalid
;
        /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt */
14006 cc
= (ctx
->opcode
>> 13) & 0x7;
14007 fmt
= (ctx
->opcode
>> 9) & 0x3;
14008 switch ((ctx
->opcode
>> 6) & 0x7) {
14009 case MOVF_FMT
: /* RINT_FMT */
14010 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14014 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14017 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14020 goto pool32f_invalid
;
14026 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14029 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14033 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14036 goto pool32f_invalid
;
14040 case MOVT_FMT
: /* CLASS_FMT */
14041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14045 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14048 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14051 goto pool32f_invalid
;
14057 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14060 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14064 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14067 goto pool32f_invalid
;
14072 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14075 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        check_insn_opc_removed(ctx, ISA_MIPS32R6);      \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
14094 check_insn(ctx
, ISA_MIPS32R6
);
14095 switch ((ctx
->opcode
>> 9) & 0x3) {
14097 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14100 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14103 goto pool32f_invalid
;
14107 check_insn(ctx
, ISA_MIPS32R6
);
14108 switch ((ctx
->opcode
>> 9) & 0x3) {
14110 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14113 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14116 goto pool32f_invalid
;
14120 /* regular FP ops */
14121 switch ((ctx
->opcode
>> 6) & 0x3) {
14123 FINSN_3ARG_SDPS(ADD
);
14126 FINSN_3ARG_SDPS(SUB
);
14129 FINSN_3ARG_SDPS(MUL
);
14132 fmt
= (ctx
->opcode
>> 8) & 0x3;
14134 mips32_op
= OPC_DIV_D
;
14135 } else if (fmt
== 0) {
14136 mips32_op
= OPC_DIV_S
;
14138 goto pool32f_invalid
;
14142 goto pool32f_invalid
;
14147 switch ((ctx
->opcode
>> 6) & 0x7) {
14148 case MOVN_FMT
: /* SELNEZ_FMT */
14149 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14151 switch ((ctx
->opcode
>> 9) & 0x3) {
14153 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14156 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14159 goto pool32f_invalid
;
14163 FINSN_3ARG_SDPS(MOVN
);
14167 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14168 FINSN_3ARG_SDPS(MOVN
);
14170 case MOVZ_FMT
: /* SELEQZ_FMT */
14171 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14173 switch ((ctx
->opcode
>> 9) & 0x3) {
14175 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14178 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14181 goto pool32f_invalid
;
14185 FINSN_3ARG_SDPS(MOVZ
);
14189 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14190 FINSN_3ARG_SDPS(MOVZ
);
14193 check_insn(ctx
, ISA_MIPS32R6
);
14194 switch ((ctx
->opcode
>> 9) & 0x3) {
14196 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14199 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14202 goto pool32f_invalid
;
14206 check_insn(ctx
, ISA_MIPS32R6
);
14207 switch ((ctx
->opcode
>> 9) & 0x3) {
14209 mips32_op
= OPC_MADDF_S
;
14212 mips32_op
= OPC_MADDF_D
;
14215 goto pool32f_invalid
;
14219 check_insn(ctx
, ISA_MIPS32R6
);
14220 switch ((ctx
->opcode
>> 9) & 0x3) {
14222 mips32_op
= OPC_MSUBF_S
;
14225 mips32_op
= OPC_MSUBF_D
;
14228 goto pool32f_invalid
;
14232 goto pool32f_invalid
;
14236 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14240 MIPS_INVAL("pool32f");
14241 generate_exception_end(ctx
, EXCP_RI
);
14245 generate_exception_err(ctx
, EXCP_CpU
, 1);
14249 minor
= (ctx
->opcode
>> 21) & 0x1f;
14252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14253 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14256 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14257 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14258 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14261 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14262 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14263 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14266 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14267 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14270 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14271 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14272 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14276 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14277 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14281 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14284 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14285 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14289 case TLTI
: /* BC1EQZC */
14290 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14292 check_cp1_enabled(ctx
);
14293 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14296 mips32_op
= OPC_TLTI
;
14300 case TGEI
: /* BC1NEZC */
14301 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14303 check_cp1_enabled(ctx
);
14304 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14307 mips32_op
= OPC_TGEI
;
14312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14313 mips32_op
= OPC_TLTIU
;
14316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14317 mips32_op
= OPC_TGEIU
;
14319 case TNEI
: /* SYNCI */
14320 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
            /* Break the TB to be able to sync copied instructions
               immediately */
14324 ctx
->bstate
= BS_STOP
;
14327 mips32_op
= OPC_TNEI
;
14332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14333 mips32_op
= OPC_TEQI
;
14335 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14341 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14342 4, rs
, 0, imm
<< 1, 0);
        /* Compact branches don't have a delay slot, so just let
           the normal delay slot handling take us to the branch
           target. */
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
        /* Break the TB to be able to sync copied instructions
           immediately */
14355 ctx
->bstate
= BS_STOP
;
14359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14360 /* COP2: Not implemented. */
14361 generate_exception_err(ctx
, EXCP_CpU
, 2);
14364 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14365 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14368 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14369 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14372 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14373 mips32_op
= OPC_BC1FANY4
;
14376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14377 mips32_op
= OPC_BC1TANY4
;
14380 check_insn(ctx
, ASE_MIPS3D
);
14383 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14384 check_cp1_enabled(ctx
);
14385 gen_compute_branch1(ctx
, mips32_op
,
14386 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14388 generate_exception_err(ctx
, EXCP_CpU
, 1);
14393 /* MIPS DSP: not implemented */
14396 MIPS_INVAL("pool32i");
14397 generate_exception_end(ctx
, EXCP_RI
);
14402 minor
= (ctx
->opcode
>> 12) & 0xf;
14403 offset
= sextract32(ctx
->opcode
, 0,
14404 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14407 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14408 mips32_op
= OPC_LWL
;
14411 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14412 mips32_op
= OPC_SWL
;
14415 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14416 mips32_op
= OPC_LWR
;
14419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14420 mips32_op
= OPC_SWR
;
14422 #if defined(TARGET_MIPS64)
14424 check_insn(ctx
, ISA_MIPS3
);
14425 check_mips_64(ctx
);
14426 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14427 mips32_op
= OPC_LDL
;
14430 check_insn(ctx
, ISA_MIPS3
);
14431 check_mips_64(ctx
);
14432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14433 mips32_op
= OPC_SDL
;
14436 check_insn(ctx
, ISA_MIPS3
);
14437 check_mips_64(ctx
);
14438 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14439 mips32_op
= OPC_LDR
;
14442 check_insn(ctx
, ISA_MIPS3
);
14443 check_mips_64(ctx
);
14444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14445 mips32_op
= OPC_SDR
;
14448 check_insn(ctx
, ISA_MIPS3
);
14449 check_mips_64(ctx
);
14450 mips32_op
= OPC_LWU
;
14453 check_insn(ctx
, ISA_MIPS3
);
14454 check_mips_64(ctx
);
14455 mips32_op
= OPC_LLD
;
14459 mips32_op
= OPC_LL
;
14462 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14465 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14468 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14470 #if defined(TARGET_MIPS64)
14472 check_insn(ctx
, ISA_MIPS3
);
14473 check_mips_64(ctx
);
14474 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14478 /* Treat as no-op */
14479 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14480 /* hint codes 24-31 are reserved and signal RI */
14481 generate_exception(ctx
, EXCP_RI
);
14485 MIPS_INVAL("pool32c");
14486 generate_exception_end(ctx
, EXCP_RI
);
14490 case ADDI32
: /* AUI, LUI */
14491 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14493 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14496 mips32_op
= OPC_ADDI
;
14501 mips32_op
= OPC_ADDIU
;
14503 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14506 /* Logical operations */
14508 mips32_op
= OPC_ORI
;
14511 mips32_op
= OPC_XORI
;
14514 mips32_op
= OPC_ANDI
;
14516 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14519 /* Set less than immediate */
14521 mips32_op
= OPC_SLTI
;
14524 mips32_op
= OPC_SLTIU
;
14526 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14529 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14530 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14531 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14532 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14534 case JALS32
: /* BOVC, BEQC, BEQZALC */
14535 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14538 mips32_op
= OPC_BOVC
;
14539 } else if (rs
< rt
&& rs
== 0) {
14541 mips32_op
= OPC_BEQZALC
;
14544 mips32_op
= OPC_BEQC
;
14546 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14549 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14550 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14551 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14554 case BEQ32
: /* BC */
14555 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14557 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14558 sextract32(ctx
->opcode
<< 1, 0, 27));
14561 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14564 case BNE32
: /* BALC */
14565 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14567 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14568 sextract32(ctx
->opcode
<< 1, 0, 27));
14571 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14574 case J32
: /* BGTZC, BLTZC, BLTC */
14575 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14576 if (rs
== 0 && rt
!= 0) {
14578 mips32_op
= OPC_BGTZC
;
14579 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14581 mips32_op
= OPC_BLTZC
;
14584 mips32_op
= OPC_BLTC
;
14586 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14589 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14590 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14593 case JAL32
: /* BLEZC, BGEZC, BGEC */
14594 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14595 if (rs
== 0 && rt
!= 0) {
14597 mips32_op
= OPC_BLEZC
;
14598 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14600 mips32_op
= OPC_BGEZC
;
14603 mips32_op
= OPC_BGEC
;
14605 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14608 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14609 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14610 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14613 /* Floating point (COP1) */
14615 mips32_op
= OPC_LWC1
;
14618 mips32_op
= OPC_LDC1
;
14621 mips32_op
= OPC_SWC1
;
14624 mips32_op
= OPC_SDC1
;
14626 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14628 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14629 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14630 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14631 switch ((ctx
->opcode
>> 16) & 0x1f) {
14632 case ADDIUPC_00
... ADDIUPC_07
:
14633 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14636 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14639 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14641 case LWPC_08
... LWPC_0F
:
14642 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14645 generate_exception(ctx
, EXCP_RI
);
        {
            int reg = mmreg(ZIMM(ctx->opcode, 23, 3));
            int offset = SIMM(ctx->opcode, 0, 23) << 2;

            gen_addiupc(ctx, reg, offset, 0, 0);
        }
        break;
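/*
 * Illustration only: both the R6 PCREL forms above (which use ctx->pc & ~0x3)
 * and the classic microMIPS ADDIUPC just decoded build their result from an
 * aligned PC plus a scaled, sign-extended offset. The sketch below assumes
 * the word-aligned-base behaviour shown in the R6 cases; the function name
 * is hypothetical.
 */
#if 0   /* illustrative sketch, not compiled */
#include <stdint.h>

/* Effective ADDIUPC-style target for an instruction at insn_addr with a
 * signed offset field measured in words. */
static uint64_t addiupc_target(uint64_t insn_addr, int32_t offset_field)
{
    uint64_t base = insn_addr & ~(uint64_t)0x3;   /* word-align the PC */
    return base + ((int64_t)offset_field << 2);   /* offset is in words */
}
#endif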
14656 case BNVC
: /* BNEC, BNEZALC */
14657 check_insn(ctx
, ISA_MIPS32R6
);
14660 mips32_op
= OPC_BNVC
;
14661 } else if (rs
< rt
&& rs
== 0) {
14663 mips32_op
= OPC_BNEZALC
;
14666 mips32_op
= OPC_BNEC
;
14668 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14670 case R6_BNEZC
: /* JIALC */
14671 check_insn(ctx
, ISA_MIPS32R6
);
14674 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14675 sextract32(ctx
->opcode
<< 1, 0, 22));
14678 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14681 case R6_BEQZC
: /* JIC */
14682 check_insn(ctx
, ISA_MIPS32R6
);
14685 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14686 sextract32(ctx
->opcode
<< 1, 0, 22));
14689 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14692 case BLEZALC
: /* BGEZALC, BGEUC */
14693 check_insn(ctx
, ISA_MIPS32R6
);
14694 if (rs
== 0 && rt
!= 0) {
14696 mips32_op
= OPC_BLEZALC
;
14697 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14699 mips32_op
= OPC_BGEZALC
;
14702 mips32_op
= OPC_BGEUC
;
14704 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14706 case BGTZALC
: /* BLTZALC, BLTUC */
14707 check_insn(ctx
, ISA_MIPS32R6
);
14708 if (rs
== 0 && rt
!= 0) {
14710 mips32_op
= OPC_BGTZALC
;
14711 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14713 mips32_op
= OPC_BLTZALC
;
14716 mips32_op
= OPC_BLTUC
;
14718 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14720 /* Loads and stores */
14722 mips32_op
= OPC_LB
;
14725 mips32_op
= OPC_LBU
;
14728 mips32_op
= OPC_LH
;
14731 mips32_op
= OPC_LHU
;
14734 mips32_op
= OPC_LW
;
14736 #ifdef TARGET_MIPS64
14738 check_insn(ctx
, ISA_MIPS3
);
14739 check_mips_64(ctx
);
14740 mips32_op
= OPC_LD
;
14743 check_insn(ctx
, ISA_MIPS3
);
14744 check_mips_64(ctx
);
14745 mips32_op
= OPC_SD
;
14749 mips32_op
= OPC_SB
;
14752 mips32_op
= OPC_SH
;
14755 mips32_op
= OPC_SW
;
14758 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14761 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14764 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        case 0:
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        case 4:
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        case 5:
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        case 6:
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        case 7:
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        case 1:
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        case 2:
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        case 3:
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        }
    }
14816 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14817 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14818 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14821 switch (ctx
->opcode
& 0x1) {
14829 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
            /* In the Release 6 the register number location in
             * the instruction encoding has changed.
             */
14833 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14835 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14841 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14842 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14843 int amount
= (ctx
->opcode
>> 1) & 0x7;
14845 amount
= amount
== 0 ? 8 : amount
;
14847 switch (ctx
->opcode
& 0x1) {
14856 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14860 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14861 gen_pool16c_r6_insn(ctx
);
14863 gen_pool16c_insn(ctx
);
14868 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14869 int rb
= 28; /* GP */
14870 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14872 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14877 if (ctx
->opcode
& 1) {
14878 generate_exception_end(ctx
, EXCP_RI
);
14881 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14882 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14883 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14884 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14889 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14890 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14891 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14892 offset
= (offset
== 0xf ? -1 : offset
);
14894 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14899 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14900 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14901 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14903 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14908 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14909 int rb
= 29; /* SP */
14910 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14912 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14917 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14918 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14919 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14921 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14926 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14927 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14928 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14930 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14935 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14936 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14937 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14939 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14944 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14945 int rb
= 29; /* SP */
14946 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14948 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14953 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14954 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14955 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14957 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14962 int rd
= uMIPS_RD5(ctx
->opcode
);
14963 int rs
= uMIPS_RS5(ctx
->opcode
);
14965 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14972 switch (ctx
->opcode
& 0x1) {
14982 switch (ctx
->opcode
& 0x1) {
14987 gen_addiur1sp(ctx
);
14991 case B16
: /* BC16 */
14992 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14993 sextract32(ctx
->opcode
, 0, 10) << 1,
14994 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14996 case BNEZ16
: /* BNEZC16 */
14997 case BEQZ16
: /* BEQZC16 */
14998 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14999 mmreg(uMIPS_RD(ctx
->opcode
)),
15000 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15001 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
        {
            int reg = mmreg(uMIPS_RD(ctx->opcode));
            int imm = ZIMM(ctx->opcode, 0, 7);

            imm = (imm == 0x7f ? -1 : imm);
            tcg_gen_movi_tl(cpu_gpr[reg], imm);
        }
        break;
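/*
 * Illustration only: LI16 encodes the immediates 0..126 directly and reserves
 * the all-ones field value 0x7f for -1, which is exactly the mapping applied
 * just above. A standalone sketch; the function name is hypothetical.
 */
#if 0   /* illustrative sketch, not compiled */
#include <assert.h>
#include <stdint.h>

/* Decode the 7-bit LI16 immediate field. */
static int32_t li16_decode_imm(uint32_t field7)
{
    assert(field7 <= 0x7f);
    return field7 == 0x7f ? -1 : (int32_t)field7;
}
#endif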
15016 generate_exception_end(ctx
, EXCP_RI
);
15019 decode_micromips32_opc(env
, ctx
);
15026 /* SmartMIPS extension to MIPS32 */
15028 #if defined(TARGET_MIPS64)
15030 /* MDMX extension to MIPS64 */
15034 /* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
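/*
 * Illustration only: gen_mipsdsp_ld() above forms the effective address as
 * GPR[base] + GPR[offset], special-casing register 0 on either side so the
 * always-zero register never has to be materialised. A plain-C sketch of the
 * same address selection; the names are hypothetical.
 */
#if 0   /* illustrative sketch, not compiled */
#include <stdint.h>

/* Hypothetical model of the GPR file: index 0 always reads as zero. */
static uint64_t read_gpr(const uint64_t *gpr, int idx)
{
    return idx == 0 ? 0 : gpr[idx];
}

/* Effective address of an indexed DSP load (LBUX/LHX/LWX style). */
static uint64_t dsp_indexed_addr(const uint64_t *gpr, int base, int offset)
{
    if (base == 0) {
        return read_gpr(gpr, offset);        /* base is $zero */
    } else if (offset == 0) {
        return read_gpr(gpr, base);          /* index is $zero */
    }
    return read_gpr(gpr, base) + read_gpr(gpr, offset);
}
#endif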
15074 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15075 int ret
, int v1
, int v2
)
15081 /* Treat as NOP. */
15085 v1_t
= tcg_temp_new();
15086 v2_t
= tcg_temp_new();
15088 gen_load_gpr(v1_t
, v1
);
15089 gen_load_gpr(v2_t
, v2
);
15092 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15093 case OPC_MULT_G_2E
:
15097 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15099 case OPC_ADDUH_R_QB
:
15100 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15103 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15105 case OPC_ADDQH_R_PH
:
15106 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15109 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15111 case OPC_ADDQH_R_W
:
15112 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15115 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15117 case OPC_SUBUH_R_QB
:
15118 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15121 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15123 case OPC_SUBQH_R_PH
:
15124 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15127 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15129 case OPC_SUBQH_R_W
:
15130 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15134 case OPC_ABSQ_S_PH_DSP
:
15136 case OPC_ABSQ_S_QB
:
15138 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15140 case OPC_ABSQ_S_PH
:
15142 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15146 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15148 case OPC_PRECEQ_W_PHL
:
15150 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15151 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15153 case OPC_PRECEQ_W_PHR
:
15155 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15156 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15157 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15159 case OPC_PRECEQU_PH_QBL
:
15161 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15163 case OPC_PRECEQU_PH_QBR
:
15165 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15167 case OPC_PRECEQU_PH_QBLA
:
15169 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15171 case OPC_PRECEQU_PH_QBRA
:
15173 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15175 case OPC_PRECEU_PH_QBL
:
15177 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15179 case OPC_PRECEU_PH_QBR
:
15181 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15183 case OPC_PRECEU_PH_QBLA
:
15185 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15187 case OPC_PRECEU_PH_QBRA
:
15189 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15193 case OPC_ADDU_QB_DSP
:
15197 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15199 case OPC_ADDQ_S_PH
:
15201 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15205 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15209 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15211 case OPC_ADDU_S_QB
:
15213 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15217 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15219 case OPC_ADDU_S_PH
:
15221 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15225 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15227 case OPC_SUBQ_S_PH
:
15229 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15233 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15237 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15239 case OPC_SUBU_S_QB
:
15241 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15245 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15247 case OPC_SUBU_S_PH
:
15249 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15253 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15257 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15261 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15263 case OPC_RADDU_W_QB
:
15265 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15269 case OPC_CMPU_EQ_QB_DSP
:
15271 case OPC_PRECR_QB_PH
:
15273 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15275 case OPC_PRECRQ_QB_PH
:
15277 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15279 case OPC_PRECR_SRA_PH_W
:
15282 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15283 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15285 tcg_temp_free_i32(sa_t
);
15288 case OPC_PRECR_SRA_R_PH_W
:
15291 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15292 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15294 tcg_temp_free_i32(sa_t
);
15297 case OPC_PRECRQ_PH_W
:
15299 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15301 case OPC_PRECRQ_RS_PH_W
:
15303 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15305 case OPC_PRECRQU_S_QB_PH
:
15307 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15311 #ifdef TARGET_MIPS64
15312 case OPC_ABSQ_S_QH_DSP
:
15314 case OPC_PRECEQ_L_PWL
:
15316 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15318 case OPC_PRECEQ_L_PWR
:
15320 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15322 case OPC_PRECEQ_PW_QHL
:
15324 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15326 case OPC_PRECEQ_PW_QHR
:
15328 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15330 case OPC_PRECEQ_PW_QHLA
:
15332 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15334 case OPC_PRECEQ_PW_QHRA
:
15336 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15338 case OPC_PRECEQU_QH_OBL
:
15340 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15342 case OPC_PRECEQU_QH_OBR
:
15344 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15346 case OPC_PRECEQU_QH_OBLA
:
15348 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15350 case OPC_PRECEQU_QH_OBRA
:
15352 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15354 case OPC_PRECEU_QH_OBL
:
15356 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15358 case OPC_PRECEU_QH_OBR
:
15360 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15362 case OPC_PRECEU_QH_OBLA
:
15364 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15366 case OPC_PRECEU_QH_OBRA
:
15368 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15370 case OPC_ABSQ_S_OB
:
15372 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15374 case OPC_ABSQ_S_PW
:
15376 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15378 case OPC_ABSQ_S_QH
:
15380 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15384 case OPC_ADDU_OB_DSP
:
15386 case OPC_RADDU_L_OB
:
15388 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15392 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15394 case OPC_SUBQ_S_PW
:
15396 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15400 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15402 case OPC_SUBQ_S_QH
:
15404 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15408 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15410 case OPC_SUBU_S_OB
:
15412 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15416 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15418 case OPC_SUBU_S_QH
:
15420 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15424 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15426 case OPC_SUBUH_R_OB
:
15428 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15432 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15434 case OPC_ADDQ_S_PW
:
15436 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15440 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15442 case OPC_ADDQ_S_QH
:
15444 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15448 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15450 case OPC_ADDU_S_OB
:
15452 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15456 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15458 case OPC_ADDU_S_QH
:
15460 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15464 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15466 case OPC_ADDUH_R_OB
:
15468 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15472 case OPC_CMPU_EQ_OB_DSP
:
15474 case OPC_PRECR_OB_QH
:
15476 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15478 case OPC_PRECR_SRA_QH_PW
:
15481 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15482 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15483 tcg_temp_free_i32(ret_t
);
15486 case OPC_PRECR_SRA_R_QH_PW
:
15489 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15490 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15491 tcg_temp_free_i32(sa_v
);
15494 case OPC_PRECRQ_OB_QH
:
15496 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15498 case OPC_PRECRQ_PW_L
:
15500 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15502 case OPC_PRECRQ_QH_PW
:
15504 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15506 case OPC_PRECRQ_RS_QH_PW
:
15508 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15510 case OPC_PRECRQU_S_OB_QH
:
15512 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15519 tcg_temp_free(v1_t
);
15520 tcg_temp_free(v2_t
);
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
    uint32_t op2;
    TCGv t0;
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (opc) {
15545 case OPC_SHLL_QB_DSP
:
15547 op2
= MASK_SHLL_QB(ctx
->opcode
);
15551 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15555 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15559 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15563 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15565 case OPC_SHLL_S_PH
:
15567 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15569 case OPC_SHLLV_S_PH
:
15571 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15575 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15577 case OPC_SHLLV_S_W
:
15579 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15583 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15587 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15591 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15595 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15599 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15601 case OPC_SHRA_R_QB
:
15603 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15607 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15609 case OPC_SHRAV_R_QB
:
15611 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15615 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15617 case OPC_SHRA_R_PH
:
15619 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15623 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15625 case OPC_SHRAV_R_PH
:
15627 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15631 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15633 case OPC_SHRAV_R_W
:
15635 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15637 default: /* Invalid */
15638 MIPS_INVAL("MASK SHLL.QB");
15639 generate_exception_end(ctx
, EXCP_RI
);
15644 #ifdef TARGET_MIPS64
15645 case OPC_SHLL_OB_DSP
:
15646 op2
= MASK_SHLL_OB(ctx
->opcode
);
15650 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15654 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15656 case OPC_SHLL_S_PW
:
15658 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15660 case OPC_SHLLV_S_PW
:
15662 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15666 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15670 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15674 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15678 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15680 case OPC_SHLL_S_QH
:
15682 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15684 case OPC_SHLLV_S_QH
:
15686 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15690 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15694 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15696 case OPC_SHRA_R_OB
:
15698 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15700 case OPC_SHRAV_R_OB
:
15702 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15706 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15710 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15712 case OPC_SHRA_R_PW
:
15714 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15716 case OPC_SHRAV_R_PW
:
15718 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15722 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15726 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15728 case OPC_SHRA_R_QH
:
15730 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15732 case OPC_SHRAV_R_QH
:
15734 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15738 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15742 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15746 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15750 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15752 default: /* Invalid */
15753 MIPS_INVAL("MASK SHLL.OB");
15754 generate_exception_end(ctx
, EXCP_RI
);
15762 tcg_temp_free(v1_t
);
15763 tcg_temp_free(v2_t
);
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    TCGv_i32 t0;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
15793 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15796 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15799 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15801 case OPC_MULQ_RS_W
:
15802 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15806 case OPC_DPA_W_PH_DSP
:
15808 case OPC_DPAU_H_QBL
:
15810 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15812 case OPC_DPAU_H_QBR
:
15814 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15816 case OPC_DPSU_H_QBL
:
15818 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15820 case OPC_DPSU_H_QBR
:
15822 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15826 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15828 case OPC_DPAX_W_PH
:
15830 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15832 case OPC_DPAQ_S_W_PH
:
15834 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15836 case OPC_DPAQX_S_W_PH
:
15838 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15840 case OPC_DPAQX_SA_W_PH
:
15842 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15846 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15848 case OPC_DPSX_W_PH
:
15850 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15852 case OPC_DPSQ_S_W_PH
:
15854 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15856 case OPC_DPSQX_S_W_PH
:
15858 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15860 case OPC_DPSQX_SA_W_PH
:
15862 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15864 case OPC_MULSAQ_S_W_PH
:
15866 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15868 case OPC_DPAQ_SA_L_W
:
15870 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15872 case OPC_DPSQ_SA_L_W
:
15874 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15876 case OPC_MAQ_S_W_PHL
:
15878 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15880 case OPC_MAQ_S_W_PHR
:
15882 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15884 case OPC_MAQ_SA_W_PHL
:
15886 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15888 case OPC_MAQ_SA_W_PHR
:
15890 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15892 case OPC_MULSA_W_PH
:
15894 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15898 #ifdef TARGET_MIPS64
15899 case OPC_DPAQ_W_QH_DSP
:
15901 int ac
= ret
& 0x03;
15902 tcg_gen_movi_i32(t0
, ac
);
15907 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15911 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15915 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15919 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15923 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15925 case OPC_DPAQ_S_W_QH
:
15927 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15929 case OPC_DPAQ_SA_L_PW
:
15931 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15933 case OPC_DPAU_H_OBL
:
15935 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15937 case OPC_DPAU_H_OBR
:
15939 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15943 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15945 case OPC_DPSQ_S_W_QH
:
15947 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15949 case OPC_DPSQ_SA_L_PW
:
15951 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15953 case OPC_DPSU_H_OBL
:
15955 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15957 case OPC_DPSU_H_OBR
:
15959 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15961 case OPC_MAQ_S_L_PWL
:
15963 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15965 case OPC_MAQ_S_L_PWR
:
15967 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15969 case OPC_MAQ_S_W_QHLL
:
15971 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15973 case OPC_MAQ_SA_W_QHLL
:
15975 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15977 case OPC_MAQ_S_W_QHLR
:
15979 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15981 case OPC_MAQ_SA_W_QHLR
:
15983 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15985 case OPC_MAQ_S_W_QHRL
:
15987 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15989 case OPC_MAQ_SA_W_QHRL
:
15991 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15993 case OPC_MAQ_S_W_QHRR
:
15995 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15997 case OPC_MAQ_SA_W_QHRR
:
15999 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16001 case OPC_MULSAQ_S_L_PW
:
16003 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16005 case OPC_MULSAQ_S_W_QH
:
16007 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16013 case OPC_ADDU_QB_DSP
:
16015 case OPC_MULEU_S_PH_QBL
:
16017 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16019 case OPC_MULEU_S_PH_QBR
:
16021 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16023 case OPC_MULQ_RS_PH
:
16025 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16027 case OPC_MULEQ_S_W_PHL
:
16029 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16031 case OPC_MULEQ_S_W_PHR
:
16033 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16035 case OPC_MULQ_S_PH
:
16037 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16041 #ifdef TARGET_MIPS64
16042 case OPC_ADDU_OB_DSP
:
16044 case OPC_MULEQ_S_PW_QHL
:
16046 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16048 case OPC_MULEQ_S_PW_QHR
:
16050 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16052 case OPC_MULEU_S_QH_OBL
:
16054 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16056 case OPC_MULEU_S_QH_OBR
:
16058 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16060 case OPC_MULQ_RS_QH
:
16062 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16069 tcg_temp_free_i32(t0
);
16070 tcg_temp_free(v1_t
);
16071 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
    int16_t imm;
    TCGv t0;
    TCGv val_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
    switch (op1) {
    case OPC_ABSQ_S_PH_DSP:
        switch (op2) {
        case OPC_BITREV:
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            break;
        case OPC_REPL_QB:
            {
                target_long result;
                imm = (ctx->opcode >> 16) & 0xFF;
                result = (uint32_t)imm << 24 |
                         (uint32_t)imm << 16 |
                         (uint32_t)imm << 8  |
                         (uint32_t)imm;
                result = (int32_t)result;
                tcg_gen_movi_tl(cpu_gpr[ret], result);
            }
            break;
        case OPC_REPLV_QB:
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            break;
        case OPC_REPL_PH:
            {
                imm = (ctx->opcode >> 16) & 0x03FF;
                imm = (int16_t)(imm << 6) >> 6;
                tcg_gen_movi_tl(cpu_gpr[ret],
                                (target_long)((int32_t)imm << 16 |
                                              (uint16_t)imm));
            }
            break;
        case OPC_REPLV_PH:
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            break;
        }
        break;
16138 #ifdef TARGET_MIPS64
16139 case OPC_ABSQ_S_QH_DSP
:
16146 imm
= (ctx
->opcode
>> 16) & 0xFF;
16147 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16148 temp
= (temp
<< 16) | temp
;
16149 temp
= (temp
<< 32) | temp
;
16150 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16158 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16159 imm
= (int16_t)(imm
<< 6) >> 6;
16160 temp
= ((target_long
)imm
<< 32) \
16161 | ((target_long
)imm
& 0xFFFFFFFF);
16162 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16170 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16171 imm
= (int16_t)(imm
<< 6) >> 6;
16173 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16174 ((uint64_t)(uint16_t)imm
<< 32) |
16175 ((uint64_t)(uint16_t)imm
<< 16) |
16176 (uint64_t)(uint16_t)imm
;
16177 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16182 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16183 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16184 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16185 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16186 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16187 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16188 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16192 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16193 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16194 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16198 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16199 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16200 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16201 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16202 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
    tcg_temp_free(val_t);
}
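/*
 * Illustration only: the REPL/REPLV cases in gen_mipsdsp_bitinsn() build a
 * value by replicating a small field into every byte or halfword of the
 * result, and REPL.PH first sign-extends its 10-bit immediate with the
 * shift-left/arithmetic-shift-right idiom used above. A standalone sketch of
 * both steps; the function names are hypothetical.
 */
#if 0   /* illustrative sketch, not compiled */
#include <stdint.h>

/* Sign-extend the low 10 bits of a field, as in
 * imm = (int16_t)(imm << 6) >> 6 above. */
static int16_t sext10(uint16_t field10)
{
    return (int16_t)(field10 << 6) >> 6;
}

/* REPL.QB: replicate an 8-bit immediate into all four bytes. */
static int32_t repl_qb(uint8_t imm)
{
    uint32_t r = (uint32_t)imm << 24 | (uint32_t)imm << 16 |
                 (uint32_t)imm << 8  | (uint32_t)imm;
    return (int32_t)r;
}

/* REPL.PH: replicate the sign-extended 10-bit immediate into both
 * halfwords of the result. */
static int32_t repl_ph(uint16_t field10)
{
    int16_t imm = sext10(field10);
    return (int32_t)((uint32_t)(uint16_t)imm << 16 | (uint16_t)imm);
}
#endif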
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
{
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
16233 case OPC_CMPU_EQ_QB_DSP
:
16235 case OPC_CMPU_EQ_QB
:
16237 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16239 case OPC_CMPU_LT_QB
:
16241 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16243 case OPC_CMPU_LE_QB
:
16245 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16247 case OPC_CMPGU_EQ_QB
:
16249 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16251 case OPC_CMPGU_LT_QB
:
16253 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16255 case OPC_CMPGU_LE_QB
:
16257 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16259 case OPC_CMPGDU_EQ_QB
:
16261 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16262 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16263 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16264 tcg_gen_shli_tl(t1
, t1
, 24);
16265 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16267 case OPC_CMPGDU_LT_QB
:
16269 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16270 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16271 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16272 tcg_gen_shli_tl(t1
, t1
, 24);
16273 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16275 case OPC_CMPGDU_LE_QB
:
16277 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16278 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16279 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16280 tcg_gen_shli_tl(t1
, t1
, 24);
16281 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16283 case OPC_CMP_EQ_PH
:
16285 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16287 case OPC_CMP_LT_PH
:
16289 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16291 case OPC_CMP_LE_PH
:
16293 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16297 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16301 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16303 case OPC_PACKRL_PH
:
16305 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16309 #ifdef TARGET_MIPS64
16310 case OPC_CMPU_EQ_OB_DSP
:
16312 case OPC_CMP_EQ_PW
:
16314 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16316 case OPC_CMP_LT_PW
:
16318 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16320 case OPC_CMP_LE_PW
:
16322 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16324 case OPC_CMP_EQ_QH
:
16326 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16328 case OPC_CMP_LT_QH
:
16330 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16332 case OPC_CMP_LE_QH
:
16334 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16336 case OPC_CMPGDU_EQ_OB
:
16338 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16340 case OPC_CMPGDU_LT_OB
:
16342 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16344 case OPC_CMPGDU_LE_OB
:
16346 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16348 case OPC_CMPGU_EQ_OB
:
16350 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16352 case OPC_CMPGU_LT_OB
:
16354 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16356 case OPC_CMPGU_LE_OB
:
16358 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16360 case OPC_CMPU_EQ_OB
:
16362 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16364 case OPC_CMPU_LT_OB
:
16366 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16368 case OPC_CMPU_LE_OB
:
16370 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16372 case OPC_PACKRL_PW
:
16374 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16378 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16382 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16386 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16394 tcg_temp_free(v1_t
);
16395 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
    TCGv t0;

    if (rt == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
16414 case OPC_APPEND_DSP
:
16415 switch (MASK_APPEND(ctx
->opcode
)) {
16418 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16420 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16424 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16425 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16426 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16427 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16429 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16433 if (sa
!= 0 && sa
!= 2) {
16434 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16435 tcg_gen_ext32u_tl(t0
, t0
);
16436 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16437 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16439 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16441 default: /* Invalid */
16442 MIPS_INVAL("MASK APPEND");
16443 generate_exception_end(ctx
, EXCP_RI
);
16447 #ifdef TARGET_MIPS64
16448 case OPC_DAPPEND_DSP
:
16449 switch (MASK_DAPPEND(ctx
->opcode
)) {
16452 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16456 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16457 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16458 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16462 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16463 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16464 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16469 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16470 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16471 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16472 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
        default:            /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    }
    tcg_temp_free(t0);
}
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
{
    int16_t imm;
    TCGv t0;
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
16510 case OPC_EXTR_W_DSP
:
16514 tcg_gen_movi_tl(t0
, v2
);
16515 tcg_gen_movi_tl(t1
, v1
);
16516 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16519 tcg_gen_movi_tl(t0
, v2
);
16520 tcg_gen_movi_tl(t1
, v1
);
16521 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16523 case OPC_EXTR_RS_W
:
16524 tcg_gen_movi_tl(t0
, v2
);
16525 tcg_gen_movi_tl(t1
, v1
);
16526 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16529 tcg_gen_movi_tl(t0
, v2
);
16530 tcg_gen_movi_tl(t1
, v1
);
16531 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16533 case OPC_EXTRV_S_H
:
16534 tcg_gen_movi_tl(t0
, v2
);
16535 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16538 tcg_gen_movi_tl(t0
, v2
);
16539 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16541 case OPC_EXTRV_R_W
:
16542 tcg_gen_movi_tl(t0
, v2
);
16543 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16545 case OPC_EXTRV_RS_W
:
16546 tcg_gen_movi_tl(t0
, v2
);
16547 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16550 tcg_gen_movi_tl(t0
, v2
);
16551 tcg_gen_movi_tl(t1
, v1
);
16552 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16555 tcg_gen_movi_tl(t0
, v2
);
16556 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16559 tcg_gen_movi_tl(t0
, v2
);
16560 tcg_gen_movi_tl(t1
, v1
);
16561 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16564 tcg_gen_movi_tl(t0
, v2
);
16565 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16568 imm
= (ctx
->opcode
>> 20) & 0x3F;
16569 tcg_gen_movi_tl(t0
, ret
);
16570 tcg_gen_movi_tl(t1
, imm
);
16571 gen_helper_shilo(t0
, t1
, cpu_env
);
16574 tcg_gen_movi_tl(t0
, ret
);
16575 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16578 tcg_gen_movi_tl(t0
, ret
);
16579 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16582 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16583 tcg_gen_movi_tl(t0
, imm
);
16584 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16587 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16588 tcg_gen_movi_tl(t0
, imm
);
16589 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
#ifdef TARGET_MIPS64
    case OPC_DEXTR_W_DSP:
        check_dsp(ctx);
        switch (op2) {
        case OPC_DMTHLIP:
            tcg_gen_movi_tl(t0, ret);
            gen_helper_dmthlip(v1_t, t0, cpu_env);
            break;
        case OPC_DSHILO:
            {
                int shift = (ctx->opcode >> 19) & 0x7F;
                int ac = (ctx->opcode >> 11) & 0x03;
                tcg_gen_movi_tl(t0, shift);
                tcg_gen_movi_tl(t1, ac);
                gen_helper_dshilo(t0, t1, cpu_env);
                break;
            }
        case OPC_DSHILOV:
            {
                int ac = (ctx->opcode >> 11) & 0x03;
                tcg_gen_movi_tl(t0, ac);
                gen_helper_dshilo(v1_t, t0, cpu_env);
                break;
            }
        case OPC_DEXTP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTPDP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTPDPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTR_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_R_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_RS_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_R_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_RS_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTRV_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTRV_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_R_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_RS_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_R_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_RS_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        }
        break;
#endif
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}

/* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_MUL:
        case R6_OPC_MUH:
        case R6_OPC_MULU:
        case R6_OPC_MUHU:
        case R6_OPC_DIV:
        case R6_OPC_MOD:
        case R6_OPC_DIVU:
        case R6_OPC_MODU:
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SELEQZ:
    case OPC_SELNEZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case R6_OPC_CLO:
    case R6_OPC_CLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case R6_OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                generate_exception_end(ctx, EXCP_DBp);
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DLSA:
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case R6_OPC_DCLO:
    case R6_OPC_DCLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_DMUL:
        case R6_OPC_DMUH:
        case R6_OPC_DMULU:
        case R6_OPC_DMUHU:
        case R6_OPC_DDIV:
        case R6_OPC_DMOD:
        case R6_OPC_DDIVU:
        case R6_OPC_DMODU:
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
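
/*
 * Pre-R6 SPECIAL opcodes that were removed or redefined by Release 6:
 * conditional moves, HI/LO moves, MOVCI, the VR54xx multiply variants,
 * legacy MULT/DIV and JR, plus the unofficial SPIM opcode.
 */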
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, rs & 3, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        break;
    case OPC_MOVCI:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_MULT:
    case OPC_MULTU:
        if (sa) {
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        } else {
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        }
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_SPIM:
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
#else
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
#endif
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_SLL:          /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        }
        /* Fallthrough */
    case OPC_SRA:
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_SRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTR;
            }
            /* Fallthrough */
        case 0:
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ADD ... OPC_SUBU:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_SLLV:         /* Shifts */
    case OPC_SRAV:
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_SRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_ROTRV;
            }
            /* Fallthrough */
        case 0:
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SLT:          /* Set on less than */
    case OPC_SLTU:
        gen_slt(ctx, op1, rd, rs, rt);
        break;
    case OPC_AND:          /* Logic*/
    case OPC_OR:
    case OPC_NOR:
    case OPC_XOR:
        gen_logic(ctx, op1, rd, rs, rt);
        break;
    case OPC_JALR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_TGE ... OPC_TEQ: /* Traps */
    case OPC_TNE:
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
        break;
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        } else {
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
#else
            gen_helper_0e0i(pmon, sa);
#endif
        }
        break;
    case OPC_SYSCALL:
        generate_exception_end(ctx, EXCP_SYSCALL);
        break;
    case OPC_BREAK:
        generate_exception_end(ctx, EXCP_BREAK);
        break;
    case OPC_SYNC:
        check_insn(ctx, ISA_MIPS2);
        /* Treat as NOP. */
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
    case OPC_DSLL:
    case OPC_DSRA:
    case OPC_DSLL32:
    case OPC_DSRA32:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        break;
    case OPC_DSRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DSRL32:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 1:
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTR32;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DADD ... OPC_DSUBU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSLLV:
    case OPC_DSRAV:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_DSRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 1:
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
                op1 = OPC_DROTRV;
            }
            /* Fallthrough */
        case 0:
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DLSA:
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        }
        break;
#endif
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
        } else {
            decode_opc_special_legacy(env, ctx);
        }
    }
}
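
/*
 * SPECIAL2 was removed entirely in Release 6 (hence the
 * check_insn_opc_removed() below): MADD/MADDU, MSUB/MSUBU, MUL,
 * CLZ/CLO, SDBBP and the Loongson-2F integer ops live here.
 */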
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case R6_OPC_PREF:
        if (rt >= 24) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        }
        /* Treat as NOP. */
        break;
    case R6_OPC_CACHE:
        /* Treat as NOP. */
        break;
    case R6_OPC_SC:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LL:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_BSHFL:
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_BSHFL(ctx->opcode);
            switch (op2) {
            case OPC_ALIGN ... OPC_ALIGN_END:
                gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
                break;
            case OPC_BITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case R6_OPC_SCD:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LLD:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_DBSHFL:
        check_mips_64(ctx);
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_DBSHFL(ctx->opcode);
            switch (op2) {
            case OPC_DALIGN ... OPC_DALIGN_END:
                gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
                break;
            case OPC_DBITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
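
/*
 * Pre-R6 SPECIAL3 decoding.  Most of this is the DSP ASE: each
 * OPC_*_DSP case re-dispatches on a secondary function field
 * (MASK_ADDU_QB, MASK_CMPU_EQ_QB, ...) before calling the
 * gen_mipsdsp_* translation helpers.
 */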
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case OPC_DIV_G_2E ... OPC_DIVU_G_2E:
    case OPC_MOD_G_2E ... OPC_MODU_G_2E:
    case OPC_MULT_G_2E ... OPC_MULTU_G_2E:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
        if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
            op2 = MASK_ADDUH_QB(ctx->opcode);
            switch (op2) {
            case OPC_ADDUH_QB:
            case OPC_ADDUH_R_QB:
            case OPC_ADDQH_PH:
            case OPC_ADDQH_R_PH:
            case OPC_ADDQH_W:
            case OPC_ADDQH_R_W:
            case OPC_SUBUH_QB:
            case OPC_SUBUH_R_QB:
            case OPC_SUBQH_PH:
            case OPC_SUBQH_R_PH:
            case OPC_SUBQH_W:
            case OPC_SUBQH_R_W:
                gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
                break;
            case OPC_MUL_PH:
            case OPC_MUL_S_PH:
            case OPC_MULQ_S_W:
            case OPC_MULQ_RS_W:
                gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
                break;
            default:
                MIPS_INVAL("MASK ADDUH.QB");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else if (ctx->insn_flags & INSN_LOONGSON2E) {
            gen_loongson_integer(ctx, op1, rd, rs, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case OPC_LX_DSP:
        op2 = MASK_LX(ctx->opcode);
        switch (op2) {
#if defined(TARGET_MIPS64)
        case OPC_LDX:
#endif
        case OPC_LBUX:
        case OPC_LHX:
        case OPC_LWX:
            gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK LX");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ABSQ_S_PH_DSP:
        op2 = MASK_ABSQ_S_PH(ctx->opcode);
        switch (op2) {
        case OPC_ABSQ_S_QB:
        case OPC_ABSQ_S_PH:
        case OPC_ABSQ_S_W:
        case OPC_PRECEQ_W_PHL:
        case OPC_PRECEQ_W_PHR:
        case OPC_PRECEQU_PH_QBL:
        case OPC_PRECEQU_PH_QBR:
        case OPC_PRECEQU_PH_QBLA:
        case OPC_PRECEQU_PH_QBRA:
        case OPC_PRECEU_PH_QBL:
        case OPC_PRECEU_PH_QBR:
        case OPC_PRECEU_PH_QBLA:
        case OPC_PRECEU_PH_QBRA:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_BITREV:
        case OPC_REPL_QB:
        case OPC_REPLV_QB:
        case OPC_REPL_PH:
        case OPC_REPLV_PH:
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            break;
        default:
            MIPS_INVAL("MASK ABSQ_S.PH");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ADDU_QB_DSP:
        op2 = MASK_ADDU_QB(ctx->opcode);
        switch (op2) {
        case OPC_ADDQ_PH:
        case OPC_ADDQ_S_PH:
        case OPC_ADDQ_S_W:
        case OPC_ADDU_QB:
        case OPC_ADDU_S_QB:
        case OPC_ADDU_PH:
        case OPC_ADDU_S_PH:
        case OPC_SUBQ_PH:
        case OPC_SUBQ_S_PH:
        case OPC_SUBQ_S_W:
        case OPC_SUBU_QB:
        case OPC_SUBU_S_QB:
        case OPC_SUBU_PH:
        case OPC_SUBU_S_PH:
        case OPC_ADDSC:
        case OPC_ADDWC:
        case OPC_MODSUB:
        case OPC_RADDU_W_QB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_MULEU_S_PH_QBL:
        case OPC_MULEU_S_PH_QBR:
        case OPC_MULQ_RS_PH:
        case OPC_MULEQ_S_W_PHL:
        case OPC_MULEQ_S_W_PHR:
        case OPC_MULQ_S_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK ADDU.QB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CMPU_EQ_QB_DSP:
        op2 = MASK_CMPU_EQ_QB(ctx->opcode);
        switch (op2) {
        case OPC_PRECR_SRA_PH_W:
        case OPC_PRECR_SRA_R_PH_W:
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
            break;
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
            break;
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PICK_QB:
        case OPC_PICK_PH:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
        break;
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        switch (op2) {
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPA_W_PH:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPS_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_INSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
        switch (op2) {
        case OPC_INSV:
            check_dsp(ctx);
            {
                TCGv t0, t1;

                if (rt == 0) {
                    /* Treat as NOP. */
                    break;
                }

                t0 = tcg_temp_new();
                t1 = tcg_temp_new();

                gen_load_gpr(t0, rt);
                gen_load_gpr(t1, rs);

                gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);

                tcg_temp_free(t0);
                tcg_temp_free(t1);
                break;
            }
        default:            /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
        break;
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        switch (op2) {
        case OPC_EXTR_W:
        case OPC_EXTR_R_W:
        case OPC_EXTR_RS_W:
        case OPC_EXTR_S_H:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_W:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
        case OPC_EXTP:
        case OPC_EXTPV:
        case OPC_EXTPDP:
        case OPC_EXTPDPV:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            break;
        case OPC_RDDSP:
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            break;
        case OPC_SHILO:
        case OPC_SHILOV:
        case OPC_MTHLIP:
        case OPC_WRDSP:
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DDIV_G_2E ... OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E ... OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E ... OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        switch (op2) {
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA:
        case OPC_ABSQ_S_OB:
        case OPC_ABSQ_S_PW:
        case OPC_ABSQ_S_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_REPL_OB:
        case OPC_REPL_PW:
        case OPC_REPL_QH:
        case OPC_REPLV_OB:
        case OPC_REPLV_PW:
        case OPC_REPLV_QH:
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK ABSQ_S.QH");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_ADDU_OB_DSP:
        op2 = MASK_ADDU_OB(ctx->opcode);
        switch (op2) {
        case OPC_RADDU_L_OB:
        case OPC_SUBQ_PW:
        case OPC_SUBQ_S_PW:
        case OPC_SUBQ_QH:
        case OPC_SUBQ_S_QH:
        case OPC_SUBU_OB:
        case OPC_SUBU_S_OB:
        case OPC_SUBU_QH:
        case OPC_SUBU_S_QH:
        case OPC_SUBUH_OB:
        case OPC_SUBUH_R_OB:
        case OPC_ADDQ_PW:
        case OPC_ADDQ_S_PW:
        case OPC_ADDQ_QH:
        case OPC_ADDQ_S_QH:
        case OPC_ADDU_OB:
        case OPC_ADDU_S_OB:
        case OPC_ADDU_QH:
        case OPC_ADDU_S_QH:
        case OPC_ADDUH_OB:
        case OPC_ADDUH_R_OB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_MULEQ_S_PW_QHL:
        case OPC_MULEQ_S_PW_QHR:
        case OPC_MULEU_S_QH_OBL:
        case OPC_MULEU_S_QH_OBR:
        case OPC_MULQ_RS_QH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK ADDU.OB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CMPU_EQ_OB_DSP:
        op2 = MASK_CMPU_EQ_OB(ctx->opcode);
        switch (op2) {
        case OPC_PRECR_SRA_QH_PW:
        case OPC_PRECR_SRA_R_QH_PW:
            /* Return value is rt. */
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
            break;
        case OPC_PRECR_OB_QH:
        case OPC_PRECRQ_OB_QH:
        case OPC_PRECRQ_PW_L:
        case OPC_PRECRQ_QH_PW:
        case OPC_PRECRQ_RS_QH_PW:
        case OPC_PRECRQU_S_OB_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            break;
        case OPC_CMPU_EQ_OB:
        case OPC_CMPU_LT_OB:
        case OPC_CMPU_LE_OB:
        case OPC_CMP_EQ_QH:
        case OPC_CMP_LT_QH:
        case OPC_CMP_LE_QH:
        case OPC_CMP_EQ_PW:
        case OPC_CMP_LT_PW:
        case OPC_CMP_LE_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
            break;
        case OPC_CMPGDU_EQ_OB:
        case OPC_CMPGDU_LT_OB:
        case OPC_CMPGDU_LE_OB:
        case OPC_CMPGU_EQ_OB:
        case OPC_CMPGU_LT_OB:
        case OPC_CMPGU_LE_OB:
        case OPC_PACKRL_PW:
        case OPC_PICK_OB:
        case OPC_PICK_PW:
        case OPC_PICK_QH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK CMPU_EQ.OB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DAPPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
        break;
    case OPC_DEXTR_W_DSP:
        op2 = MASK_DEXTR_W(ctx->opcode);
        switch (op2) {
        case OPC_DEXTP:
        case OPC_DEXTPDP:
        case OPC_DEXTPDPV:
        case OPC_DEXTPV:
        case OPC_DEXTR_L:
        case OPC_DEXTR_R_L:
        case OPC_DEXTR_RS_L:
        case OPC_DEXTR_W:
        case OPC_DEXTR_R_W:
        case OPC_DEXTR_RS_W:
        case OPC_DEXTR_S_H:
        case OPC_DEXTRV_L:
        case OPC_DEXTRV_R_L:
        case OPC_DEXTRV_RS_L:
        case OPC_DEXTRV_S_H:
        case OPC_DEXTRV_W:
        case OPC_DEXTRV_R_W:
        case OPC_DEXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            break;
        case OPC_DMTHLIP:
        case OPC_DSHILO:
        case OPC_DSHILOV:
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DPAQ_W_QH_DSP:
        op2 = MASK_DPAQ_W_QH(ctx->opcode);
        switch (op2) {
        case OPC_DPAU_H_OBL:
        case OPC_DPAU_H_OBR:
        case OPC_DPSU_H_OBL:
        case OPC_DPSU_H_OBR:
        case OPC_DPA_W_QH:
        case OPC_DPAQ_S_W_QH:
        case OPC_DPS_W_QH:
        case OPC_DPSQ_S_W_QH:
        case OPC_MULSAQ_S_W_QH:
        case OPC_DPAQ_SA_L_PW:
        case OPC_DPSQ_SA_L_PW:
        case OPC_MULSAQ_S_L_PW:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
            break;
        case OPC_MAQ_S_W_QHLL:
        case OPC_MAQ_S_W_QHLR:
        case OPC_MAQ_S_W_QHRL:
        case OPC_MAQ_S_W_QHRR:
        case OPC_MAQ_SA_W_QHLL:
        case OPC_MAQ_SA_W_QHLR:
        case OPC_MAQ_SA_W_QHRL:
        case OPC_MAQ_SA_W_QHRR:
        case OPC_MAQ_S_L_PWL:
        case OPC_MAQ_S_L_PWR:
        case OPC_DMADD:
        case OPC_DMADDU:
        case OPC_DMSUB:
        case OPC_DMSUBU:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK DPAQ.W.QH");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DINSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
        switch (op2) {
        case OPC_DINSV:
            check_dsp(ctx);
            {
                TCGv t0, t1;

                if (rt == 0) {
                    /* Treat as NOP. */
                    break;
                }

                t0 = tcg_temp_new();
                t1 = tcg_temp_new();

                gen_load_gpr(t0, rt);
                gen_load_gpr(t1, rs);

                gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);

                tcg_temp_free(t0);
                tcg_temp_free(t1);
                break;
            }
        default:            /* Invalid */
            MIPS_INVAL("MASK DINSV");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SHLL_OB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special3_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case OPC_EXT:
    case OPC_INS:
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_BSHFL:
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN ... OPC_ALIGN_END:
        case OPC_BITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_DBSHFL:
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN ... OPC_DALIGN_END:
        case OPC_DBITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#endif
    case OPC_RDHWR:
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        break;
    case OPC_FORK:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;
    case OPC_YIELD:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, cpu_env, t0);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
        } else {
            decode_opc_special3_legacy(env, ctx);
        }
    }
}
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
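
/*
 * The zero-element test below uses the classic "haszero" bit trick:
 * subtracting 1 from every element and masking with the element's
 * complement leaves a bit in the element's sign position exactly when
 * that element was zero, so after masking with eval_big and OR-ing the
 * two 64-bit halves the result is nonzero iff some element is zero.
 */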
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
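
/*
 * MSA branch instructions: BZ.V/BNZ.V test the whole 128-bit register,
 * BZ.df/BNZ.df test per-element zero-ness via gen_check_zero_element().
 * The condition lands in bcond and the MIPS_HFLAG_BC/BDS32 flags are set
 * so the delay slot is handled by the generic branch machinery.
 */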
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_I8(op)    (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
    case OPC_ANDI_B:
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_ORI_B:
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_NORI_B:
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_XORI_B:
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMNZI_B:
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BMZI_B:
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_BSELI_B:
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
        break;
    case OPC_SHF_B:
    case OPC_SHF_H:
    case OPC_SHF_W:
        {
            uint8_t df = (ctx->opcode >> 24) & 0x3;
            if (df == DF_DOUBLE) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                TCGv_i32 tdf = tcg_const_i32(df);
                gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
                tcg_temp_free_i32(tdf);
            }
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
}
static void gen_msa_i5(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_I5(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    int8_t s5 = (int8_t) sextract32(ctx->opcode, 16, 5);
    uint8_t u5 = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 timm = tcg_temp_new_i32();
    tcg_gen_movi_i32(timm, u5);

    switch (MASK_MSA_I5(ctx->opcode)) {
    case OPC_ADDVI_df:
        gen_helper_msa_addvi_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_SUBVI_df:
        gen_helper_msa_subvi_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_MAXI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_maxi_s_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_MAXI_U_df:
        gen_helper_msa_maxi_u_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_MINI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_mini_s_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_MINI_U_df:
        gen_helper_msa_mini_u_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_CEQI_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_ceqi_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_CLTI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clti_s_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_CLTI_U_df:
        gen_helper_msa_clti_u_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_CLEI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clei_s_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_CLEI_U_df:
        gen_helper_msa_clei_u_df(cpu_env, tdf, twd, tws, timm);
        break;
    case OPC_LDI_df:
        {
            int32_t s10 = sextract32(ctx->opcode, 11, 10);
            tcg_gen_movi_i32(timm, s10);
            gen_helper_msa_ldi_df(cpu_env, tdf, twd, timm);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(timm);
}
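
/*
 * For the BIT format the 7-bit df/m field encodes both the data format
 * and the bit index: the position of the first 0 bit from the top of
 * the field selects double/word/half/byte and the remaining low bits
 * are the immediate m, which is what the if/else ladder below unpacks.
 */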
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_BIT(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf;
    TCGv_i32 tm;
    TCGv_i32 twd;
    TCGv_i32 tws;

    if ((dfm & 0x40) == 0x00) {
        m = dfm & 0x3f;
        df = DF_DOUBLE;
    } else if ((dfm & 0x60) == 0x40) {
        m = dfm & 0x1f;
        df = DF_WORD;
    } else if ((dfm & 0x70) == 0x60) {
        m = dfm & 0x0f;
        df = DF_HALF;
    } else if ((dfm & 0x78) == 0x70) {
        m = dfm & 0x7;
        df = DF_BYTE;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    tdf = tcg_const_i32(df);
    tm  = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
    case OPC_SLLI_df:
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRAI_df:
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLI_df:
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BCLRI_df:
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BSETI_df:
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BNEGI_df:
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_S_df:
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SAT_U_df:
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRARI_df:
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        break;
    case OPC_SRLRI_df:
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
}
static void gen_msa_3r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_3R(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_3R(ctx->opcode)) {
    case OPC_SLL_df:
        gen_helper_msa_sll_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADDV_df:
        gen_helper_msa_addv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CEQ_df:
        gen_helper_msa_ceq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADD_A_df:
        gen_helper_msa_add_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBS_S_df:
        gen_helper_msa_subs_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MULV_df:
        gen_helper_msa_mulv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SLD_df:
        gen_helper_msa_sld_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_VSHF_df:
        gen_helper_msa_vshf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SRA_df:
        gen_helper_msa_sra_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBV_df:
        gen_helper_msa_subv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADDS_A_df:
        gen_helper_msa_adds_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBS_U_df:
        gen_helper_msa_subs_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADDV_df:
        gen_helper_msa_maddv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SPLAT_df:
        gen_helper_msa_splat_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SRAR_df:
        gen_helper_msa_srar_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SRL_df:
        gen_helper_msa_srl_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MAX_S_df:
        gen_helper_msa_max_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLT_S_df:
        gen_helper_msa_clt_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADDS_S_df:
        gen_helper_msa_adds_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBSUS_U_df:
        gen_helper_msa_subsus_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUBV_df:
        gen_helper_msa_msubv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_PCKEV_df:
        gen_helper_msa_pckev_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SRLR_df:
        gen_helper_msa_srlr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BCLR_df:
        gen_helper_msa_bclr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MAX_U_df:
        gen_helper_msa_max_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLT_U_df:
        gen_helper_msa_clt_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ADDS_U_df:
        gen_helper_msa_adds_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_SUBSUU_S_df:
        gen_helper_msa_subsuu_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_PCKOD_df:
        gen_helper_msa_pckod_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BSET_df:
        gen_helper_msa_bset_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_S_df:
        gen_helper_msa_min_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLE_S_df:
        gen_helper_msa_cle_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVE_S_df:
        gen_helper_msa_ave_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ASUB_S_df:
        gen_helper_msa_asub_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DIV_S_df:
        gen_helper_msa_div_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVL_df:
        gen_helper_msa_ilvl_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BNEG_df:
        gen_helper_msa_bneg_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_U_df:
        gen_helper_msa_min_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_CLE_U_df:
        gen_helper_msa_cle_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVE_U_df:
        gen_helper_msa_ave_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ASUB_U_df:
        gen_helper_msa_asub_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DIV_U_df:
        gen_helper_msa_div_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVR_df:
        gen_helper_msa_ilvr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BINSL_df:
        gen_helper_msa_binsl_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MAX_A_df:
        gen_helper_msa_max_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVER_S_df:
        gen_helper_msa_aver_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MOD_S_df:
        gen_helper_msa_mod_s_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVEV_df:
        gen_helper_msa_ilvev_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_BINSR_df:
        gen_helper_msa_binsr_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MIN_A_df:
        gen_helper_msa_min_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_AVER_U_df:
        gen_helper_msa_aver_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MOD_U_df:
        gen_helper_msa_mod_u_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_ILVOD_df:
        gen_helper_msa_ilvod_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_DOTP_S_df:
    case OPC_DOTP_U_df:
    case OPC_DPADD_S_df:
    case OPC_DPADD_U_df:
    case OPC_DPSUB_S_df:
    case OPC_HADD_S_df:
    case OPC_DPSUB_U_df:
    case OPC_HADD_U_df:
    case OPC_HSUB_S_df:
    case OPC_HSUB_U_df:
        if (df == DF_BYTE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        switch (MASK_MSA_3R(ctx->opcode)) {
        case OPC_DOTP_S_df:
            gen_helper_msa_dotp_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DOTP_U_df:
            gen_helper_msa_dotp_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPADD_S_df:
            gen_helper_msa_dpadd_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPADD_U_df:
            gen_helper_msa_dpadd_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPSUB_S_df:
            gen_helper_msa_dpsub_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HADD_S_df:
            gen_helper_msa_hadd_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_DPSUB_U_df:
            gen_helper_msa_dpsub_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HADD_U_df:
            gen_helper_msa_hadd_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HSUB_S_df:
            gen_helper_msa_hsub_s_df(cpu_env, tdf, twd, tws, twt);
            break;
        case OPC_HSUB_U_df:
            gen_helper_msa_hsub_u_df(cpu_env, tdf, twd, tws, twt);
            break;
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
static void gen_msa_elm_df(CPUMIPSState *env, DisasContext *ctx, uint32_t df,
                           uint32_t n)
{
#define MASK_MSA_ELM(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tn  = tcg_const_i32(n);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_ELM(ctx->opcode)) {
    case OPC_SLDI_df:
        gen_helper_msa_sldi_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_SPLATI_df:
        gen_helper_msa_splati_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_INSVE_df:
        gen_helper_msa_insve_df(cpu_env, tdf, twd, tws, tn);
        break;
    case OPC_COPY_S_df:
    case OPC_COPY_U_df:
    case OPC_INSERT_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        switch (MASK_MSA_ELM(ctx->opcode)) {
        case OPC_COPY_S_df:
            gen_helper_msa_copy_s_df(cpu_env, tdf, twd, tws, tn);
            break;
        case OPC_COPY_U_df:
            gen_helper_msa_copy_u_df(cpu_env, tdf, twd, tws, tn);
            break;
        case OPC_INSERT_df:
            gen_helper_msa_insert_df(cpu_env, tdf, twd, tws, tn);
            break;
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    }
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(tn);
    tcg_temp_free_i32(tdf);
}
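
/*
 * ELM instructions pack the data format and element index n into the
 * 6-bit dfn field: a short leading-bit prefix selects byte/half/word/
 * double and the remaining bits are n, while the special value 0x3E
 * selects the CTCMSA/CFCMSA/MOVE.V group handled by gen_msa_elm_3e().
 */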
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
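
/*
 * 3RF format: the single df bit selects word/double for the
 * floating-point helpers (df + 2 == DF_WORD/DF_DOUBLE) and
 * halfword/word for the fixed-point Q-format multiplies
 * (df + 1 == DF_HALF/DF_WORD), hence the tdf adjustments below.
 */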
static void gen_msa_3rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_3RF(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t df = (ctx->opcode >> 21) & 0x1;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_temp_new_i32();

    /* adjust df value for floating-point instruction */
    tcg_gen_movi_i32(tdf, df + 2);

    switch (MASK_MSA_3RF(ctx->opcode)) {
    case OPC_FCAF_df:
        gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FADD_df:
        gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUN_df:
        gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUB_df:
        gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCOR_df:
        gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCEQ_df:
        gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMUL_df:
        gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUNE_df:
        gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCUEQ_df:
        gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FDIV_df:
        gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCNE_df:
        gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLT_df:
        gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMADD_df:
        gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MUL_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULT_df:
        gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMSUB_df:
        gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADD_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCLE_df:
        gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUB_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FCULE_df:
        gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXP2_df:
        gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSAF_df:
        gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FEXDO_df:
        gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUN_df:
        gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSOR_df:
        gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSEQ_df:
        gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FTQ_df:
        gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUNE_df:
        gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSUEQ_df:
        gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSNE_df:
        gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLT_df:
        gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_df:
        gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MULR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULT_df:
        gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMIN_A_df:
        gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MADDR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSLE_df:
        gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_df:
        gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_MSUBR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FSULE_df:
        gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
        break;
    case OPC_FMAX_A_df:
        gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2R(op)     (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
    case OPC_FILL_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
#endif
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        break;
    case OPC_PCNT_df:
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLOC_df:
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_NLZC_df:
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_2RF(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FSQRT_df:
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRCP_df:
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FRINT_df:
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FLOG2_df:
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQL_df:
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFQR_df:
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
        break;
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
}
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
{
    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
    case OPC_OR_V:
    case OPC_NOR_V:
    case OPC_XOR_V:
    case OPC_BMNZ_V:
    case OPC_BMZ_V:
    case OPC_BSEL_V:
        gen_msa_vec_v(env, ctx);
        break;
    case OPC_MSA_2R:
        gen_msa_2r(env, ctx);
        break;
    case OPC_MSA_2RF:
        gen_msa_2rf(env, ctx);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
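
/*
 * Top-level MSA dispatch on the minor opcode.  Note that for the vector
 * loads/stores the signed 10-bit offset is scaled by the element size
 * (s10 << df) before gen_base_offset_addr() forms the address.
 */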
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
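
/*
 * Main 32-bit instruction decoder: checks word alignment, finishes a
 * pending branch-likely (not-taken path), then dispatches on the major
 * opcode to the SPECIAL/REGIMM/CP0/CP1/... decoders above.
 */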
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
        decode_opc_special2_legacy(env, ctx);
        break;
    case OPC_SPECIAL3:
        decode_opc_special3(env, ctx);
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        case OPC_TNEI:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
            break;
        case OPC_BPOSGE32:    /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default:            /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                default:            /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            MIPS_INVAL("cp0");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
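        /*
         * The 26-bit instr_index field is scaled by four; gen_compute_branch()
         * combines it with the upper bits of the address of the instruction
         * in the delay slot to form the jump target.
         */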
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        check_insn(ctx, ISA_MIPS32R6);
        /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
        gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        check_insn(ctx, ISA_MIPS32R6);
        /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
        gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */

    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
                check_cp1_enabled(ctx);
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    case R6_OPC_CMP_AF_S:
                    case R6_OPC_CMP_UN_S:
                    case R6_OPC_CMP_EQ_S:
                    case R6_OPC_CMP_UEQ_S:
                    case R6_OPC_CMP_LT_S:
                    case R6_OPC_CMP_ULT_S:
                    case R6_OPC_CMP_LE_S:
                    case R6_OPC_CMP_ULE_S:
                    case R6_OPC_CMP_SAF_S:
                    case R6_OPC_CMP_SUN_S:
                    case R6_OPC_CMP_SEQ_S:
                    case R6_OPC_CMP_SEUQ_S:
                    case R6_OPC_CMP_SLT_S:
                    case R6_OPC_CMP_SULT_S:
                    case R6_OPC_CMP_SLE_S:
                    case R6_OPC_CMP_SULE_S:
                    case R6_OPC_CMP_OR_S:
                    case R6_OPC_CMP_UNE_S:
                    case R6_OPC_CMP_NE_S:
                    case R6_OPC_CMP_SOR_S:
                    case R6_OPC_CMP_SUNE_S:
                    case R6_OPC_CMP_SNE_S:
                        gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    case R6_OPC_CMP_AF_D:
                    case R6_OPC_CMP_UN_D:
                    case R6_OPC_CMP_EQ_D:
                    case R6_OPC_CMP_UEQ_D:
                    case R6_OPC_CMP_LT_D:
                    case R6_OPC_CMP_ULT_D:
                    case R6_OPC_CMP_LE_D:
                    case R6_OPC_CMP_ULE_D:
                    case R6_OPC_CMP_SAF_D:
                    case R6_OPC_CMP_SUN_D:
                    case R6_OPC_CMP_SEQ_D:
                    case R6_OPC_CMP_SEUQ_D:
                    case R6_OPC_CMP_SLT_D:
                    case R6_OPC_CMP_SULT_D:
                    case R6_OPC_CMP_SLE_D:
                    case R6_OPC_CMP_SULE_D:
                    case R6_OPC_CMP_OR_D:
                    case R6_OPC_CMP_UNE_D:
                    case R6_OPC_CMP_NE_D:
                    case R6_OPC_CMP_SOR_D:
                    case R6_OPC_CMP_SUNE_D:
                    case R6_OPC_CMP_SNE_D:
                        gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
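            /*
             * BC/BALC carry a 26-bit signed offset; shifting the opcode left
             * by two and sign-extracting 28 bits yields the byte offset in a
             * single step.
             */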
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BEQZC, OPC_BNEZC */
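            /*
             * BEQZC/BNEZC carry a 21-bit signed offset, hence the 23-bit
             * sign extraction after scaling by four.
             */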
            gen_compute_compact_branch(ctx, op, rs, 0,
                                       sextract32(ctx->opcode << 2, 0, 23));
            /* OPC_JIC, OPC_JIALC */
            gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
            check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
            gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
            check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
            /* Treat as NOP. */
            check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
            check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
            gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
            generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
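                /*
                 * DAUI: add the sign-extended immediate, shifted left by 16
                 * bits, to GPR[rs] and write the result to GPR[rt]; unlike
                 * DADDI there is no integer overflow trap.
                 */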
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start;
    target_ulong next_page_start;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
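    /*
     * Pre-R6 ISAs require natural alignment for ordinary loads and stores
     * (a misaligned access raises an address error), while R6 requires the
     * hardware to handle unaligned accesses; hence MO_UNALN for R6 and
     * MO_ALIGN otherwise.
     */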
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
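        /*
         * Three values are recorded per instruction: the PC, the branch
         * state carried in hflags, and btarget.  restore_state_to_opc()
         * consumes them as data[0..2] when unwinding to an instruction
         * boundary.
         */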
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            goto done_generating;
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);
        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
        if (ctx.pc >= next_page_start) {
        if (tcg_op_buf_full()) {
        if (num_insns >= max_insns)
    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    switch (ctx.bstate) {
        gen_goto_tb(&ctx, 0, ctx.pc);
        save_cpu_state(&ctx, 0);
        gen_goto_tb(&ctx, 0, ctx.pc);
        tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);
    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp) \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu: %13g\n", \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
                    (double)(fp)->fd, \
                    (double)(fp)->fs[FP_ENDIAN_IDX], \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]); \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu:%13g\n", \
                    tmp.w[FP_ENDIAN_IDX], tmp.d, \
                    (double)tmp.fs[FP_ENDIAN_IDX], \
                    (double)tmp.fs[!FP_ENDIAN_IDX]); \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
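    /*
     * In 32-bit FPU mode a double occupies an even/odd register pair, so
     * registers are dumped two at a time when is_fpu64 is clear.
     */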
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);

void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");
    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);

void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    for (i = 0; i < 32; i++) {
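        /*
         * Each 128-bit MSA register is backed by two i64 TCG globals, one
         * per doubleword; the scalar FPU registers alias the low half.
         */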
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);
    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
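    /* Bits 0..3 of HWREna enable RDHWR registers 0..3 respectively. */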
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= 0x80000000;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
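    /* IntCtl.IPTI (bits 31..29) = 7: the timer interrupt is routed to IP7. */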
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    cpu_mips_store_count(env, 1);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;
    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
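    /*
     * data[0..2] are the values recorded by tcg_gen_insn_start() in
     * gen_intermediate_code(): the PC, the branch-state hflags bits and
     * btarget.
     */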
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];