/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
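/*
 * Decoding note: a MIPS instruction word carries its major opcode in bits
 * 31..26.  The MASK_* helpers below fold the relevant minor fields (function,
 * rs, rt, sa, ...) into that value, so a switch on the masked opcode can
 * dispatch both major and sub-decoded instructions.
 */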
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
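/*
 * Note that several of the Release 6 compact branch encodings above reuse the
 * major opcode of a pre-R6 instruction (e.g. OPC_BLEZALC vs. OPC_BLEZ, or
 * OPC_BOVC vs. OPC_ADDI); the decoder tells them apart later by inspecting
 * the rs/rt register fields and the active ISA revision.
 */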
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL  = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH  = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV  = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD  = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ   = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO   = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ  = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO  = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA      = 0x05 | OPC_SPECIAL,
    OPC_DLSA     = 0x15 | OPC_SPECIAL,
};
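/*
 * In the R6 encodings above, bits 10..6 (the shift-amount field of the legacy
 * MULT/DIV encodings) select the result half: 2 gives the low word / quotient
 * and 3 gives the high word / remainder.
 */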
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD        = 0x00 | OPC_SPECIAL2,
    OPC_MADDU       = 0x01 | OPC_SPECIAL2,
    OPC_MUL         = 0x02 | OPC_SPECIAL2,
    OPC_MSUB        = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU       = 0x05 | OPC_SPECIAL2,
    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,
    OPC_CLZ         = 0x20 | OPC_SPECIAL2,
    OPC_CLO         = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ        = 0x24 | OPC_SPECIAL2,
    OPC_DCLO        = 0x25 | OPC_SPECIAL2,
    OPC_SDBBP       = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT         = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM       = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU       = 0x02 | OPC_SPECIAL3,
    OPC_DEXT        = 0x03 | OPC_SPECIAL3,
    OPC_INS         = 0x04 | OPC_SPECIAL3,
    OPC_DINSM       = 0x05 | OPC_SPECIAL3,
    OPC_DINSU       = 0x06 | OPC_SPECIAL3,
    OPC_DINS        = 0x07 | OPC_SPECIAL3,
    OPC_FORK        = 0x08 | OPC_SPECIAL3,
    OPC_YIELD       = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL       = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL      = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR       = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
475 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
477 /* MIPS DSP Arithmetic Sub-class */
478 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
484 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
492 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
494 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
496 /* MIPS DSP Multiply Sub-class insns */
497 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
502 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
505 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
506 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
521 /* MIPS DSP Multiply Sub-class insns */
522 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
523 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
524 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
525 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
528 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
544 /* DSP Bit/Manipulation Sub-class */
545 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
547 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
549 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
552 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
554 /* MIPS DSP Arithmetic Sub-class */
555 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
562 /* DSP Compare-Pick Sub-class */
563 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
582 /* MIPS DSP GPR-Based Shift Sub-class */
583 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
607 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
609 /* MIPS DSP Multiply Sub-class insns */
610 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
634 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
636 /* DSP Bit/Manipulation Sub-class */
637 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
640 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Append Sub-class */
643 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
644 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
645 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
648 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
650 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
651 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
662 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
663 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
664 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
665 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
666 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
667 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
670 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Arithmetic Sub-class */
673 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
690 /* DSP Bit/Manipulation Sub-class */
691 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
699 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
701 /* MIPS DSP Multiply Sub-class insns */
702 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
704 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
707 /* MIPS DSP Arithmetic Sub-class */
708 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
731 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
733 /* DSP Compare-Pick Sub-class */
734 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
753 /* MIPS DSP Arithmetic Sub-class */
754 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
764 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Append Sub-class */
767 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
768 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
769 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
770 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
773 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
775 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
776 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
796 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
799 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
801 /* DSP Bit/Manipulation Sub-class */
802 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
805 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
807 /* MIPS DSP Multiply Sub-class insns */
808 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
836 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP GPR-Based Shift Sub-class */
839 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBINV   = 0x03 | OPC_C0,
    OPC_TLBINVF  = 0x04 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V     = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V    = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ   = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ   = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B     = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H     = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W     = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D     = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B    = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H    = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W    = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D    = (0x1F << 21) | OPC_CP1,
};
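/*
 * Note: the R6 OPC_BC1EQZ encoding above shares the rs value 0x09 with the
 * pre-R6 OPC_BC1ANY2 encoding; which interpretation applies depends on the
 * ISA revision selected at translation time.
 */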
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2    = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2   = (0x01 << 21) | OPC_CP2,
    OPC_CFC2    = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2   = (0x03 << 21) | OPC_CP2,
    OPC_MTC2    = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2   = (0x05 << 21) | OPC_CP2,
    OPC_CTC2    = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2   = (0x07 << 21) | OPC_CP2,
    OPC_BC2     = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ  = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ  = (0x0D << 21) | OPC_CP2,
};
1000 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1003 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1004 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1005 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1013 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1014 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1022 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1023 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1025 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1026 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1027 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1028 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1031 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1032 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1040 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1041 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1047 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1048 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1054 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1055 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1061 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1062 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1068 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1069 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1075 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1076 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1082 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1083 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1089 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1090 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
1123 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1125 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1126 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1127 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1128 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1129 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1130 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1131 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1132 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1133 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1134 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1135 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1136 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1137 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1138 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1139 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1140 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1141 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1142 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1143 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1144 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1145 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1147 /* MI10 instruction */
1148 OPC_LD_B
= (0x20) | OPC_MSA
,
1149 OPC_LD_H
= (0x21) | OPC_MSA
,
1150 OPC_LD_W
= (0x22) | OPC_MSA
,
1151 OPC_LD_D
= (0x23) | OPC_MSA
,
1152 OPC_ST_B
= (0x24) | OPC_MSA
,
1153 OPC_ST_H
= (0x25) | OPC_MSA
,
1154 OPC_ST_W
= (0x26) | OPC_MSA
,
1155 OPC_ST_D
= (0x27) | OPC_MSA
,
1159 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1160 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1161 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1162 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1163 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1164 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1165 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1166 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1167 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1168 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1169 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1170 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1171 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1173 /* I8 instruction */
1174 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1175 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1176 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1177 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1178 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1179 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1180 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1181 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1182 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1183 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1185 /* VEC/2R/2RF instruction */
1186 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1187 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1188 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1189 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1190 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1191 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1192 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1194 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1195 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1197 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1198 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1199 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1200 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1201 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1203 /* 2RF instruction df(bit 16) = _w, _d */
1204 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1205 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1206 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1207 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1208 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1209 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1210 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1211 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1212 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1213 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1214 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1215 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1216 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1217 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1218 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1219 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1221 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1222 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1223 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1224 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1225 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1226 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1227 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1228 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1229 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1230 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1231 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1232 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1233 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1234 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1235 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1236 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1237 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1238 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1239 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1240 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1241 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1242 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1243 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1244 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1245 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1246 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1247 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1248 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1249 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1250 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1251 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1252 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1253 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1254 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1255 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1256 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1257 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1258 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1259 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1260 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1261 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1262 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1263 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1264 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1265 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1266 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1267 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1268 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1269 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1270 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1271 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1272 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1273 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1274 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1275 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1276 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1277 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1278 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1279 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1280 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1281 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1282 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1283 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1284 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1286 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1287 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1288 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1289 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1290 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1291 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1293 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1295 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1297 /* 3RF instruction _df(bit 21) = _w, _d */
1298 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1299 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1300 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1303 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1304 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1305 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1306 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1307 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1308 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1309 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1310 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1311 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1312 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1313 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1314 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1315 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1316 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1317 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1318 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1319 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1320 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1321 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1322 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1323 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1325 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1326 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1327 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1330 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1331 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1332 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1333 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1334 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1335 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1336 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1337 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1338 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1340 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1341 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1342 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1343 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1344 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1345 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1346 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1347 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1348 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1349 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1350 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1351 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1352 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
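/*
 * These TCG globals mirror fields of CPUMIPSState (the PC, the 32 GPRs, one
 * HI/LO pair per DSP accumulator, hflags, the FP control registers and the
 * MSA vector registers split into 64-bit halves); they are created once when
 * the translator is initialised.
 */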
#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
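/*
 * The gen_helper_<n>e<m>i wrappers above call a TCG helper that takes the CPU
 * environment, <m> TCG value arguments and one constant; the trailing integer
 * argument is boxed into a temporary TCGv_i32 for the call and freed again
 * immediately afterwards.  <n> is 1 when the helper also returns a value.
 */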
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
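/*
 * Both shadow-register helpers above index the alternate GPR file through
 * the CP0 SRSCtl.PSS field: the previous shadow set number is extracted,
 * scaled by the size of one 32-entry register bank, and added to cpu_env
 * to form the base address of the selected set.
 */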
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
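/*
 * Translation keeps PC and hflags in the DisasContext and only copies them
 * back to the CPU globals when they have actually changed (saved_pc and
 * saved_hflags track the last values written), so helpers that may raise
 * exceptions see an up-to-date architectural state without paying for a
 * full update after every instruction.
 */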
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
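/*
 * When 64-bit FP mode is off (no MIPS_HFLAG_F64), a 64-bit value is split
 * across an even/odd register pair: the helpers above deposit the low and
 * high 32-bit halves into fpu_f64[reg & ~1] and fpu_f64[reg | 1], and
 * reassemble them with tcg_gen_concat32_i64 on loads.
 */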
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);  break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);  break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break;\
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break;\
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break;\
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break;\
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break;\
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break;\
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break;\
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break;\
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break;\
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break;\
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break;\
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break;\
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break;\
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break;\
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break;\
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break;\
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break;\
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break;\
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break;\
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break;\
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break;\
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break;\
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    TCGLabel *l1 = gen_new_label();                                        \
    TCGLabel *l2 = gen_new_label();                                        \
                                                                           \
    tcg_gen_andi_tl(t0, arg2, almask);                                     \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                            \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));    \
    generate_exception(ctx, EXCP_AdES);                                    \
    gen_set_label(l1);                                                     \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                          \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                      \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));             \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));        \
    generate_exception_end(ctx, EXCP_SC);                                  \
    gen_set_label(l2);                                                     \
    tcg_gen_movi_tl(t0, 0);                                                \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                             \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                   \
    gen_store_gpr(t0, rt);                                                 \
    tcg_temp_free(t0);                                                     \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
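/*
 * In the CONFIG_USER_ONLY variant the store-conditional cannot be issued as
 * a real atomic operation; the candidate address, register number and new
 * value are stashed in the CPU state and EXCP_SC is raised so the store can
 * be validated and replayed outside of translated code.  The system-mode
 * variant simply defers to the sc/scd helpers.
 */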
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
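/*
 * For PC-relative loads the base is the current instruction address rounded
 * down to a word boundary; when the instruction sits in a branch delay slot
 * the branch size (2 bytes for a 16-bit branch, otherwise 4) appears to be
 * backed out first so the base refers to the branch rather than the slot.
 */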
2113 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2114 int rt
, int base
, int16_t offset
)
2118 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2119 /* Loongson CPU uses a load to zero register for prefetch.
2120 We emulate it as a NOP. On other CPU we must perform the
2121 actual memory access. */
2125 t0
= tcg_temp_new();
2126 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2129 #if defined(TARGET_MIPS64)
2131 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2132 ctx
->default_tcg_memop_mask
);
2133 gen_store_gpr(t0
, rt
);
2136 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2137 ctx
->default_tcg_memop_mask
);
2138 gen_store_gpr(t0
, rt
);
2142 op_ld_lld(t0
, t0
, ctx
);
2143 gen_store_gpr(t0
, rt
);
2146 t1
= tcg_temp_new();
2147 /* Do a byte access to possibly trigger a page
2148 fault with the unaligned address. */
2149 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2150 tcg_gen_andi_tl(t1
, t0
, 7);
2151 #ifndef TARGET_WORDS_BIGENDIAN
2152 tcg_gen_xori_tl(t1
, t1
, 7);
2154 tcg_gen_shli_tl(t1
, t1
, 3);
2155 tcg_gen_andi_tl(t0
, t0
, ~7);
2156 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2157 tcg_gen_shl_tl(t0
, t0
, t1
);
2158 t2
= tcg_const_tl(-1);
2159 tcg_gen_shl_tl(t2
, t2
, t1
);
2160 gen_load_gpr(t1
, rt
);
2161 tcg_gen_andc_tl(t1
, t1
, t2
);
2163 tcg_gen_or_tl(t0
, t0
, t1
);
2165 gen_store_gpr(t0
, rt
);
2168 t1
= tcg_temp_new();
2169 /* Do a byte access to possibly trigger a page
2170 fault with the unaligned address. */
2171 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2172 tcg_gen_andi_tl(t1
, t0
, 7);
2173 #ifdef TARGET_WORDS_BIGENDIAN
2174 tcg_gen_xori_tl(t1
, t1
, 7);
2176 tcg_gen_shli_tl(t1
, t1
, 3);
2177 tcg_gen_andi_tl(t0
, t0
, ~7);
2178 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2179 tcg_gen_shr_tl(t0
, t0
, t1
);
2180 tcg_gen_xori_tl(t1
, t1
, 63);
2181 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2182 tcg_gen_shl_tl(t2
, t2
, t1
);
2183 gen_load_gpr(t1
, rt
);
2184 tcg_gen_and_tl(t1
, t1
, t2
);
2186 tcg_gen_or_tl(t0
, t0
, t1
);
2188 gen_store_gpr(t0
, rt
);
2191 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2192 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2194 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2195 gen_store_gpr(t0
, rt
);
2199 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2200 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2202 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2203 gen_store_gpr(t0
, rt
);
2206 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2207 ctx
->default_tcg_memop_mask
);
2208 gen_store_gpr(t0
, rt
);
2211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2212 ctx
->default_tcg_memop_mask
);
2213 gen_store_gpr(t0
, rt
);
2216 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2217 ctx
->default_tcg_memop_mask
);
2218 gen_store_gpr(t0
, rt
);
2221 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2222 gen_store_gpr(t0
, rt
);
2225 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2226 gen_store_gpr(t0
, rt
);
2229 t1
= tcg_temp_new();
2230 /* Do a byte access to possibly trigger a page
2231 fault with the unaligned address. */
2232 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2233 tcg_gen_andi_tl(t1
, t0
, 3);
2234 #ifndef TARGET_WORDS_BIGENDIAN
2235 tcg_gen_xori_tl(t1
, t1
, 3);
2237 tcg_gen_shli_tl(t1
, t1
, 3);
2238 tcg_gen_andi_tl(t0
, t0
, ~3);
2239 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2240 tcg_gen_shl_tl(t0
, t0
, t1
);
2241 t2
= tcg_const_tl(-1);
2242 tcg_gen_shl_tl(t2
, t2
, t1
);
2243 gen_load_gpr(t1
, rt
);
2244 tcg_gen_andc_tl(t1
, t1
, t2
);
2246 tcg_gen_or_tl(t0
, t0
, t1
);
2248 tcg_gen_ext32s_tl(t0
, t0
);
2249 gen_store_gpr(t0
, rt
);
2252 t1
= tcg_temp_new();
2253 /* Do a byte access to possibly trigger a page
2254 fault with the unaligned address. */
2255 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2256 tcg_gen_andi_tl(t1
, t0
, 3);
2257 #ifdef TARGET_WORDS_BIGENDIAN
2258 tcg_gen_xori_tl(t1
, t1
, 3);
2260 tcg_gen_shli_tl(t1
, t1
, 3);
2261 tcg_gen_andi_tl(t0
, t0
, ~3);
2262 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2263 tcg_gen_shr_tl(t0
, t0
, t1
);
2264 tcg_gen_xori_tl(t1
, t1
, 31);
2265 t2
= tcg_const_tl(0xfffffffeull
);
2266 tcg_gen_shl_tl(t2
, t2
, t1
);
2267 gen_load_gpr(t1
, rt
);
2268 tcg_gen_and_tl(t1
, t1
, t2
);
2270 tcg_gen_or_tl(t0
, t0
, t1
);
2272 tcg_gen_ext32s_tl(t0
, t0
);
2273 gen_store_gpr(t0
, rt
);
2277 op_ld_ll(t0
, t0
, ctx
);
2278 gen_store_gpr(t0
, rt
);
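/*
 * The LWL/LWR and LDL/LDR paths above implement the unaligned-access pairs
 * without a helper: a dummy byte load first probes the unaligned address so
 * any page fault reports the right BadVAddr, then the aligned word or
 * doubleword containing it is loaded, shifted into place according to the
 * byte offset (complemented via xori on little-endian targets), and merged
 * with the untouched bytes of rt through a computed mask.
 */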
2285 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2286 int base
, int16_t offset
)
2288 TCGv t0
= tcg_temp_new();
2289 TCGv t1
= tcg_temp_new();
2291 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2292 gen_load_gpr(t1
, rt
);
2294 #if defined(TARGET_MIPS64)
2296 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2297 ctx
->default_tcg_memop_mask
);
2300 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2303 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2307 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2308 ctx
->default_tcg_memop_mask
);
2311 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2312 ctx
->default_tcg_memop_mask
);
2315 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2318 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2321 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2329 /* Store conditional */
2330 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2331 int base
, int16_t offset
)
2335 #ifdef CONFIG_USER_ONLY
2336 t0
= tcg_temp_local_new();
2337 t1
= tcg_temp_local_new();
2339 t0
= tcg_temp_new();
2340 t1
= tcg_temp_new();
2342 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2343 gen_load_gpr(t1
, rt
);
2345 #if defined(TARGET_MIPS64)
2348 op_st_scd(t1
, t0
, rt
, ctx
);
2353 op_st_sc(t1
, t0
, rt
, ctx
);
2360 /* Load and store */
2361 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2362 int base
, int16_t offset
)
2364 TCGv t0
= tcg_temp_new();
2366 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2367 /* Don't do NOP if destination is zero: we must perform the actual
2372 TCGv_i32 fp0
= tcg_temp_new_i32();
2373 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2374 ctx
->default_tcg_memop_mask
);
2375 gen_store_fpr32(ctx
, fp0
, ft
);
2376 tcg_temp_free_i32(fp0
);
2381 TCGv_i32 fp0
= tcg_temp_new_i32();
2382 gen_load_fpr32(ctx
, fp0
, ft
);
2383 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2384 ctx
->default_tcg_memop_mask
);
2385 tcg_temp_free_i32(fp0
);
2390 TCGv_i64 fp0
= tcg_temp_new_i64();
2391 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2392 ctx
->default_tcg_memop_mask
);
2393 gen_store_fpr64(ctx
, fp0
, ft
);
2394 tcg_temp_free_i64(fp0
);
2399 TCGv_i64 fp0
= tcg_temp_new_i64();
2400 gen_load_fpr64(ctx
, fp0
, ft
);
2401 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2402 ctx
->default_tcg_memop_mask
);
2403 tcg_temp_free_i64(fp0
);
2407 MIPS_INVAL("flt_ldst");
2408 generate_exception_end(ctx
, EXCP_RI
);
2415 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2416 int rs
, int16_t imm
)
2418 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2419 check_cp1_enabled(ctx
);
2423 check_insn(ctx
, ISA_MIPS2
);
2426 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2429 generate_exception_err(ctx
, EXCP_CpU
, 1);
2433 /* Arithmetic with immediate operand */
2434 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2435 int rt
, int rs
, int16_t imm
)
2437 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2439 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2440 /* If no destination, treat it as a NOP.
2441 For addi, we must generate the overflow exception when needed. */
2447 TCGv t0
= tcg_temp_local_new();
2448 TCGv t1
= tcg_temp_new();
2449 TCGv t2
= tcg_temp_new();
2450 TCGLabel
*l1
= gen_new_label();
2452 gen_load_gpr(t1
, rs
);
2453 tcg_gen_addi_tl(t0
, t1
, uimm
);
2454 tcg_gen_ext32s_tl(t0
, t0
);
2456 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2457 tcg_gen_xori_tl(t2
, t0
, uimm
);
2458 tcg_gen_and_tl(t1
, t1
, t2
);
2460 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2462 /* operands of same sign, result different sign */
2463 generate_exception(ctx
, EXCP_OVERFLOW
);
2465 tcg_gen_ext32s_tl(t0
, t0
);
2466 gen_store_gpr(t0
, rt
);
2472 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2473 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2475 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2478 #if defined(TARGET_MIPS64)
2481 TCGv t0
= tcg_temp_local_new();
2482 TCGv t1
= tcg_temp_new();
2483 TCGv t2
= tcg_temp_new();
2484 TCGLabel
*l1
= gen_new_label();
2486 gen_load_gpr(t1
, rs
);
2487 tcg_gen_addi_tl(t0
, t1
, uimm
);
2489 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2490 tcg_gen_xori_tl(t2
, t0
, uimm
);
2491 tcg_gen_and_tl(t1
, t1
, t2
);
2493 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2495 /* operands of same sign, result different sign */
2496 generate_exception(ctx
, EXCP_OVERFLOW
);
2498 gen_store_gpr(t0
, rt
);
2504 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2506 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
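/*
 * The ADDI/DADDI paths above detect signed overflow without branching on
 * the data: t1 = rs ^ ~imm has its sign bit set when rs and imm share a
 * sign, t2 = result ^ imm has it set when the result's sign differs from
 * imm's, so (t1 & t2) is negative exactly when two same-signed operands
 * produced a result of the opposite sign, and EXCP_OVERFLOW is raised
 * before anything is written back.
 */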
2513 /* Logic with immediate operand */
2514 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2515 int rt
, int rs
, int16_t imm
)
2520 /* If no destination, treat it as a NOP. */
2523 uimm
= (uint16_t)imm
;
2526 if (likely(rs
!= 0))
2527 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2529 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2533 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2535 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2538 if (likely(rs
!= 0))
2539 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2541 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2544 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2546 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2547 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2549 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    tcg_temp_free(t0);
}
2582 /* Shifts with immediate operand */
2583 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2584 int rt
, int rs
, int16_t imm
)
2586 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2590 /* If no destination, treat it as a NOP. */
2594 t0
= tcg_temp_new();
2595 gen_load_gpr(t0
, rs
);
2598 tcg_gen_shli_tl(t0
, t0
, uimm
);
2599 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2602 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2606 tcg_gen_ext32u_tl(t0
, t0
);
2607 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2609 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2614 TCGv_i32 t1
= tcg_temp_new_i32();
2616 tcg_gen_trunc_tl_i32(t1
, t0
);
2617 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2618 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2619 tcg_temp_free_i32(t1
);
2621 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2624 #if defined(TARGET_MIPS64)
2626 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2629 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2632 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2636 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2638 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2642 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2645 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2648 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2651 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2659 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2660 int rd
, int rs
, int rt
)
2662 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2663 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2664 /* If no destination, treat it as a NOP.
2665 For add & sub, we must generate the overflow exception when needed. */
2672 TCGv t0
= tcg_temp_local_new();
2673 TCGv t1
= tcg_temp_new();
2674 TCGv t2
= tcg_temp_new();
2675 TCGLabel
*l1
= gen_new_label();
2677 gen_load_gpr(t1
, rs
);
2678 gen_load_gpr(t2
, rt
);
2679 tcg_gen_add_tl(t0
, t1
, t2
);
2680 tcg_gen_ext32s_tl(t0
, t0
);
2681 tcg_gen_xor_tl(t1
, t1
, t2
);
2682 tcg_gen_xor_tl(t2
, t0
, t2
);
2683 tcg_gen_andc_tl(t1
, t2
, t1
);
2685 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2687 /* operands of same sign, result different sign */
2688 generate_exception(ctx
, EXCP_OVERFLOW
);
2690 gen_store_gpr(t0
, rd
);
2695 if (rs
!= 0 && rt
!= 0) {
2696 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2697 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2698 } else if (rs
== 0 && rt
!= 0) {
2699 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2700 } else if (rs
!= 0 && rt
== 0) {
2701 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2703 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2708 TCGv t0
= tcg_temp_local_new();
2709 TCGv t1
= tcg_temp_new();
2710 TCGv t2
= tcg_temp_new();
2711 TCGLabel
*l1
= gen_new_label();
2713 gen_load_gpr(t1
, rs
);
2714 gen_load_gpr(t2
, rt
);
2715 tcg_gen_sub_tl(t0
, t1
, t2
);
2716 tcg_gen_ext32s_tl(t0
, t0
);
2717 tcg_gen_xor_tl(t2
, t1
, t2
);
2718 tcg_gen_xor_tl(t1
, t0
, t1
);
2719 tcg_gen_and_tl(t1
, t1
, t2
);
2721 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2723 /* operands of different sign, first operand and result different sign */
2724 generate_exception(ctx
, EXCP_OVERFLOW
);
2726 gen_store_gpr(t0
, rd
);
2731 if (rs
!= 0 && rt
!= 0) {
2732 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2733 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2734 } else if (rs
== 0 && rt
!= 0) {
2735 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2736 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2737 } else if (rs
!= 0 && rt
== 0) {
2738 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2743 #if defined(TARGET_MIPS64)
2746 TCGv t0
= tcg_temp_local_new();
2747 TCGv t1
= tcg_temp_new();
2748 TCGv t2
= tcg_temp_new();
2749 TCGLabel
*l1
= gen_new_label();
2751 gen_load_gpr(t1
, rs
);
2752 gen_load_gpr(t2
, rt
);
2753 tcg_gen_add_tl(t0
, t1
, t2
);
2754 tcg_gen_xor_tl(t1
, t1
, t2
);
2755 tcg_gen_xor_tl(t2
, t0
, t2
);
2756 tcg_gen_andc_tl(t1
, t2
, t1
);
2758 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2760 /* operands of same sign, result different sign */
2761 generate_exception(ctx
, EXCP_OVERFLOW
);
2763 gen_store_gpr(t0
, rd
);
2768 if (rs
!= 0 && rt
!= 0) {
2769 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2770 } else if (rs
== 0 && rt
!= 0) {
2771 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2772 } else if (rs
!= 0 && rt
== 0) {
2773 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2775 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2780 TCGv t0
= tcg_temp_local_new();
2781 TCGv t1
= tcg_temp_new();
2782 TCGv t2
= tcg_temp_new();
2783 TCGLabel
*l1
= gen_new_label();
2785 gen_load_gpr(t1
, rs
);
2786 gen_load_gpr(t2
, rt
);
2787 tcg_gen_sub_tl(t0
, t1
, t2
);
2788 tcg_gen_xor_tl(t2
, t1
, t2
);
2789 tcg_gen_xor_tl(t1
, t0
, t1
);
2790 tcg_gen_and_tl(t1
, t1
, t2
);
2792 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2794 /* operands of different sign, first operand and result different sign */
2795 generate_exception(ctx
, EXCP_OVERFLOW
);
2797 gen_store_gpr(t0
, rd
);
2802 if (rs
!= 0 && rt
!= 0) {
2803 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2804 } else if (rs
== 0 && rt
!= 0) {
2805 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2806 } else if (rs
!= 0 && rt
== 0) {
2807 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2809 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2814 if (likely(rs
!= 0 && rt
!= 0)) {
2815 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2816 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
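/*
 * The register-register ADD/SUB overflow checks above follow the same
 * pattern with TCG values instead of an immediate: for ADD, t1 = rs ^ rt
 * marks operands of differing sign and t2 = result ^ rt marks a sign change
 * in the result, so andc(t2, t1) is negative only when equal-signed
 * operands overflowed; for SUB the terms are adjusted so the trap fires
 * when the operands differ in sign and the result's sign differs from rs.
 */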
2824 /* Conditional move */
2825 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2826 int rd
, int rs
, int rt
)
2831 /* If no destination, treat it as a NOP. */
2835 t0
= tcg_temp_new();
2836 gen_load_gpr(t0
, rt
);
2837 t1
= tcg_const_tl(0);
2838 t2
= tcg_temp_new();
2839 gen_load_gpr(t2
, rs
);
2842 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2845 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2848 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2851 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2860 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2861 int rd
, int rs
, int rt
)
2864 /* If no destination, treat it as a NOP. */
2870 if (likely(rs
!= 0 && rt
!= 0)) {
2871 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2873 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2877 if (rs
!= 0 && rt
!= 0) {
2878 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2879 } else if (rs
== 0 && rt
!= 0) {
2880 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2881 } else if (rs
!= 0 && rt
== 0) {
2882 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2884 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2888 if (likely(rs
!= 0 && rt
!= 0)) {
2889 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2890 } else if (rs
== 0 && rt
!= 0) {
2891 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2892 } else if (rs
!= 0 && rt
== 0) {
2893 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2895 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2899 if (likely(rs
!= 0 && rt
!= 0)) {
2900 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2901 } else if (rs
== 0 && rt
!= 0) {
2902 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2903 } else if (rs
!= 0 && rt
== 0) {
2904 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2906 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
2940 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2941 int rd
, int rs
, int rt
)
2946 /* If no destination, treat it as a NOP.
2947 For add & sub, we must generate the overflow exception when needed. */
2951 t0
= tcg_temp_new();
2952 t1
= tcg_temp_new();
2953 gen_load_gpr(t0
, rs
);
2954 gen_load_gpr(t1
, rt
);
2957 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2958 tcg_gen_shl_tl(t0
, t1
, t0
);
2959 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2962 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2963 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2966 tcg_gen_ext32u_tl(t1
, t1
);
2967 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2968 tcg_gen_shr_tl(t0
, t1
, t0
);
2969 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2973 TCGv_i32 t2
= tcg_temp_new_i32();
2974 TCGv_i32 t3
= tcg_temp_new_i32();
2976 tcg_gen_trunc_tl_i32(t2
, t0
);
2977 tcg_gen_trunc_tl_i32(t3
, t1
);
2978 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2979 tcg_gen_rotr_i32(t2
, t3
, t2
);
2980 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2981 tcg_temp_free_i32(t2
);
2982 tcg_temp_free_i32(t3
);
2985 #if defined(TARGET_MIPS64)
2987 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2988 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2991 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2992 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2995 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2996 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2999 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3000 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3008 /* Arithmetic on HI/LO registers */
3009 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3011 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3022 #if defined(TARGET_MIPS64)
3024 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3028 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3032 #if defined(TARGET_MIPS64)
3034 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3038 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3043 #if defined(TARGET_MIPS64)
3045 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3049 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3052 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3057 #if defined(TARGET_MIPS64)
3059 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3063 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3066 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3072 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3075 TCGv t0
= tcg_const_tl(addr
);
3076 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3077 gen_store_gpr(t0
, reg
);
3081 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3087 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3090 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3091 addr
= addr_add(ctx
, pc
, offset
);
3092 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3096 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3097 addr
= addr_add(ctx
, pc
, offset
);
3098 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3100 #if defined(TARGET_MIPS64)
3103 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3104 addr
= addr_add(ctx
, pc
, offset
);
3105 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3109 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3112 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3113 addr
= addr_add(ctx
, pc
, offset
);
3114 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3119 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3120 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3121 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3124 #if defined(TARGET_MIPS64)
3125 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3126 case R6_OPC_LDPC
+ (1 << 16):
3127 case R6_OPC_LDPC
+ (2 << 16):
3128 case R6_OPC_LDPC
+ (3 << 16):
3130 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3131 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3132 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3136 MIPS_INVAL("OPC_PCREL");
3137 generate_exception_end(ctx
, EXCP_RI
);
3144 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3153 t0
= tcg_temp_new();
3154 t1
= tcg_temp_new();
3156 gen_load_gpr(t0
, rs
);
3157 gen_load_gpr(t1
, rt
);
3162 TCGv t2
= tcg_temp_new();
3163 TCGv t3
= tcg_temp_new();
3164 tcg_gen_ext32s_tl(t0
, t0
);
3165 tcg_gen_ext32s_tl(t1
, t1
);
3166 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3167 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3168 tcg_gen_and_tl(t2
, t2
, t3
);
3169 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3170 tcg_gen_or_tl(t2
, t2
, t3
);
3171 tcg_gen_movi_tl(t3
, 0);
3172 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3173 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3174 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3181 TCGv t2
= tcg_temp_new();
3182 TCGv t3
= tcg_temp_new();
3183 tcg_gen_ext32s_tl(t0
, t0
);
3184 tcg_gen_ext32s_tl(t1
, t1
);
3185 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3186 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3187 tcg_gen_and_tl(t2
, t2
, t3
);
3188 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3189 tcg_gen_or_tl(t2
, t2
, t3
);
3190 tcg_gen_movi_tl(t3
, 0);
3191 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3192 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3193 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3200 TCGv t2
= tcg_const_tl(0);
3201 TCGv t3
= tcg_const_tl(1);
3202 tcg_gen_ext32u_tl(t0
, t0
);
3203 tcg_gen_ext32u_tl(t1
, t1
);
3204 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3205 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3206 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3213 TCGv t2
= tcg_const_tl(0);
3214 TCGv t3
= tcg_const_tl(1);
3215 tcg_gen_ext32u_tl(t0
, t0
);
3216 tcg_gen_ext32u_tl(t1
, t1
);
3217 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3218 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3219 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3226 TCGv_i32 t2
= tcg_temp_new_i32();
3227 TCGv_i32 t3
= tcg_temp_new_i32();
3228 tcg_gen_trunc_tl_i32(t2
, t0
);
3229 tcg_gen_trunc_tl_i32(t3
, t1
);
3230 tcg_gen_mul_i32(t2
, t2
, t3
);
3231 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3232 tcg_temp_free_i32(t2
);
3233 tcg_temp_free_i32(t3
);
3238 TCGv_i32 t2
= tcg_temp_new_i32();
3239 TCGv_i32 t3
= tcg_temp_new_i32();
3240 tcg_gen_trunc_tl_i32(t2
, t0
);
3241 tcg_gen_trunc_tl_i32(t3
, t1
);
3242 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3243 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3244 tcg_temp_free_i32(t2
);
3245 tcg_temp_free_i32(t3
);
3250 TCGv_i32 t2
= tcg_temp_new_i32();
3251 TCGv_i32 t3
= tcg_temp_new_i32();
3252 tcg_gen_trunc_tl_i32(t2
, t0
);
3253 tcg_gen_trunc_tl_i32(t3
, t1
);
3254 tcg_gen_mul_i32(t2
, t2
, t3
);
3255 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3256 tcg_temp_free_i32(t2
);
3257 tcg_temp_free_i32(t3
);
3262 TCGv_i32 t2
= tcg_temp_new_i32();
3263 TCGv_i32 t3
= tcg_temp_new_i32();
3264 tcg_gen_trunc_tl_i32(t2
, t0
);
3265 tcg_gen_trunc_tl_i32(t3
, t1
);
3266 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3267 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3268 tcg_temp_free_i32(t2
);
3269 tcg_temp_free_i32(t3
);
3272 #if defined(TARGET_MIPS64)
3275 TCGv t2
= tcg_temp_new();
3276 TCGv t3
= tcg_temp_new();
3277 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3278 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3279 tcg_gen_and_tl(t2
, t2
, t3
);
3280 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3281 tcg_gen_or_tl(t2
, t2
, t3
);
3282 tcg_gen_movi_tl(t3
, 0);
3283 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3284 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3291 TCGv t2
= tcg_temp_new();
3292 TCGv t3
= tcg_temp_new();
3293 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3294 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3295 tcg_gen_and_tl(t2
, t2
, t3
);
3296 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3297 tcg_gen_or_tl(t2
, t2
, t3
);
3298 tcg_gen_movi_tl(t3
, 0);
3299 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3300 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3307 TCGv t2
= tcg_const_tl(0);
3308 TCGv t3
= tcg_const_tl(1);
3309 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3310 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3317 TCGv t2
= tcg_const_tl(0);
3318 TCGv t3
= tcg_const_tl(1);
3319 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3320 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3326 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3330 TCGv t2
= tcg_temp_new();
3331 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3336 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3340 TCGv t2
= tcg_temp_new();
3341 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3347 MIPS_INVAL("r6 mul/div");
3348 generate_exception_end(ctx
, EXCP_RI
);
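/*
 * The R6 divide/modulo sequences above avoid host-side traps rather than
 * raising a guest exception: a divisor of zero, or the INT_MIN / -1
 * combination in the signed cases, is rewritten to a safe value with
 * movcond before the division is emitted, since MIPS leaves the result of
 * such divisions UNPREDICTABLE and the translator only has to produce
 * something well-defined for the host.
 */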
3356 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3357 int acc
, int rs
, int rt
)
3361 t0
= tcg_temp_new();
3362 t1
= tcg_temp_new();
3364 gen_load_gpr(t0
, rs
);
3365 gen_load_gpr(t1
, rt
);
3374 TCGv t2
= tcg_temp_new();
3375 TCGv t3
= tcg_temp_new();
3376 tcg_gen_ext32s_tl(t0
, t0
);
3377 tcg_gen_ext32s_tl(t1
, t1
);
3378 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3379 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3380 tcg_gen_and_tl(t2
, t2
, t3
);
3381 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3382 tcg_gen_or_tl(t2
, t2
, t3
);
3383 tcg_gen_movi_tl(t3
, 0);
3384 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3385 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3386 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3387 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3388 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3395 TCGv t2
= tcg_const_tl(0);
3396 TCGv t3
= tcg_const_tl(1);
3397 tcg_gen_ext32u_tl(t0
, t0
);
3398 tcg_gen_ext32u_tl(t1
, t1
);
3399 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3400 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3401 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3402 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3403 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3410 TCGv_i32 t2
= tcg_temp_new_i32();
3411 TCGv_i32 t3
= tcg_temp_new_i32();
3412 tcg_gen_trunc_tl_i32(t2
, t0
);
3413 tcg_gen_trunc_tl_i32(t3
, t1
);
3414 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3415 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3416 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3417 tcg_temp_free_i32(t2
);
3418 tcg_temp_free_i32(t3
);
3423 TCGv_i32 t2
= tcg_temp_new_i32();
3424 TCGv_i32 t3
= tcg_temp_new_i32();
3425 tcg_gen_trunc_tl_i32(t2
, t0
);
3426 tcg_gen_trunc_tl_i32(t3
, t1
);
3427 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3428 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3429 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3430 tcg_temp_free_i32(t2
);
3431 tcg_temp_free_i32(t3
);
3434 #if defined(TARGET_MIPS64)
3437 TCGv t2
= tcg_temp_new();
3438 TCGv t3
= tcg_temp_new();
3439 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3440 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3441 tcg_gen_and_tl(t2
, t2
, t3
);
3442 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3443 tcg_gen_or_tl(t2
, t2
, t3
);
3444 tcg_gen_movi_tl(t3
, 0);
3445 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3446 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3447 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3454 TCGv t2
= tcg_const_tl(0);
3455 TCGv t3
= tcg_const_tl(1);
3456 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3457 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3458 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3464 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3467 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3472 TCGv_i64 t2
= tcg_temp_new_i64();
3473 TCGv_i64 t3
= tcg_temp_new_i64();
3475 tcg_gen_ext_tl_i64(t2
, t0
);
3476 tcg_gen_ext_tl_i64(t3
, t1
);
3477 tcg_gen_mul_i64(t2
, t2
, t3
);
3478 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3479 tcg_gen_add_i64(t2
, t2
, t3
);
3480 tcg_temp_free_i64(t3
);
3481 gen_move_low32(cpu_LO
[acc
], t2
);
3482 gen_move_high32(cpu_HI
[acc
], t2
);
3483 tcg_temp_free_i64(t2
);
3488 TCGv_i64 t2
= tcg_temp_new_i64();
3489 TCGv_i64 t3
= tcg_temp_new_i64();
3491 tcg_gen_ext32u_tl(t0
, t0
);
3492 tcg_gen_ext32u_tl(t1
, t1
);
3493 tcg_gen_extu_tl_i64(t2
, t0
);
3494 tcg_gen_extu_tl_i64(t3
, t1
);
3495 tcg_gen_mul_i64(t2
, t2
, t3
);
3496 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3497 tcg_gen_add_i64(t2
, t2
, t3
);
3498 tcg_temp_free_i64(t3
);
3499 gen_move_low32(cpu_LO
[acc
], t2
);
3500 gen_move_high32(cpu_HI
[acc
], t2
);
3501 tcg_temp_free_i64(t2
);
3506 TCGv_i64 t2
= tcg_temp_new_i64();
3507 TCGv_i64 t3
= tcg_temp_new_i64();
3509 tcg_gen_ext_tl_i64(t2
, t0
);
3510 tcg_gen_ext_tl_i64(t3
, t1
);
3511 tcg_gen_mul_i64(t2
, t2
, t3
);
3512 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3513 tcg_gen_sub_i64(t2
, t3
, t2
);
3514 tcg_temp_free_i64(t3
);
3515 gen_move_low32(cpu_LO
[acc
], t2
);
3516 gen_move_high32(cpu_HI
[acc
], t2
);
3517 tcg_temp_free_i64(t2
);
3522 TCGv_i64 t2
= tcg_temp_new_i64();
3523 TCGv_i64 t3
= tcg_temp_new_i64();
3525 tcg_gen_ext32u_tl(t0
, t0
);
3526 tcg_gen_ext32u_tl(t1
, t1
);
3527 tcg_gen_extu_tl_i64(t2
, t0
);
3528 tcg_gen_extu_tl_i64(t3
, t1
);
3529 tcg_gen_mul_i64(t2
, t2
, t3
);
3530 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3531 tcg_gen_sub_i64(t2
, t3
, t2
);
3532 tcg_temp_free_i64(t3
);
3533 gen_move_low32(cpu_LO
[acc
], t2
);
3534 gen_move_high32(cpu_HI
[acc
], t2
);
3535 tcg_temp_free_i64(t2
);
3539 MIPS_INVAL("mul/div");
3540 generate_exception_end(ctx
, EXCP_RI
);
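/*
 * The MADD/MADDU/MSUB/MSUBU cases above widen both operands to 64 bits,
 * multiply, then add to or subtract from the current HI:LO pair, which is
 * rebuilt with tcg_gen_concat_tl_i64 and split back with
 * gen_move_low32/gen_move_high32 so the same code serves both 32-bit and
 * 64-bit targets.
 */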
3548 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3549 int rd
, int rs
, int rt
)
3551 TCGv t0
= tcg_temp_new();
3552 TCGv t1
= tcg_temp_new();
3554 gen_load_gpr(t0
, rs
);
3555 gen_load_gpr(t1
, rt
);
3558 case OPC_VR54XX_MULS
:
3559 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3561 case OPC_VR54XX_MULSU
:
3562 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3564 case OPC_VR54XX_MACC
:
3565 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MACCU
:
3568 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MSAC
:
3571 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MSACU
:
3574 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MULHI
:
3577 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MULHIU
:
3580 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULSHI
:
3583 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULSHIU
:
3586 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MACCHI
:
3589 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MACCHIU
:
3592 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MSACHI
:
3595 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MSACHIU
:
3598 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3601 MIPS_INVAL("mul vr54xx");
3602 generate_exception_end(ctx
, EXCP_RI
);
3605 gen_store_gpr(t0
, rd
);
3612 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3621 t0
= tcg_temp_new();
3622 gen_load_gpr(t0
, rs
);
3626 gen_helper_clo(cpu_gpr
[rd
], t0
);
3630 gen_helper_clz(cpu_gpr
[rd
], t0
);
3632 #if defined(TARGET_MIPS64)
3635 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3639 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3646 /* Godson integer instructions */
3647 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3648 int rd
, int rs
, int rt
)
3660 case OPC_MULTU_G_2E
:
3661 case OPC_MULTU_G_2F
:
3662 #if defined(TARGET_MIPS64)
3663 case OPC_DMULT_G_2E
:
3664 case OPC_DMULT_G_2F
:
3665 case OPC_DMULTU_G_2E
:
3666 case OPC_DMULTU_G_2F
:
3668 t0
= tcg_temp_new();
3669 t1
= tcg_temp_new();
3672 t0
= tcg_temp_local_new();
3673 t1
= tcg_temp_local_new();
3677 gen_load_gpr(t0
, rs
);
3678 gen_load_gpr(t1
, rt
);
3683 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3684 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3686 case OPC_MULTU_G_2E
:
3687 case OPC_MULTU_G_2F
:
3688 tcg_gen_ext32u_tl(t0
, t0
);
3689 tcg_gen_ext32u_tl(t1
, t1
);
3690 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3691 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3696 TCGLabel
*l1
= gen_new_label();
3697 TCGLabel
*l2
= gen_new_label();
3698 TCGLabel
*l3
= gen_new_label();
3699 tcg_gen_ext32s_tl(t0
, t0
);
3700 tcg_gen_ext32s_tl(t1
, t1
);
3701 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3702 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3705 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3706 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3707 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3710 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3711 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3718 TCGLabel
*l1
= gen_new_label();
3719 TCGLabel
*l2
= gen_new_label();
3720 tcg_gen_ext32u_tl(t0
, t0
);
3721 tcg_gen_ext32u_tl(t1
, t1
);
3722 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3723 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3726 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3727 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3734 TCGLabel
*l1
= gen_new_label();
3735 TCGLabel
*l2
= gen_new_label();
3736 TCGLabel
*l3
= gen_new_label();
3737 tcg_gen_ext32u_tl(t0
, t0
);
3738 tcg_gen_ext32u_tl(t1
, t1
);
3739 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3740 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3741 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3743 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3746 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3747 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3754 TCGLabel
*l1
= gen_new_label();
3755 TCGLabel
*l2
= gen_new_label();
3756 tcg_gen_ext32u_tl(t0
, t0
);
3757 tcg_gen_ext32u_tl(t1
, t1
);
3758 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3759 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3762 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3763 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3767 #if defined(TARGET_MIPS64)
3768 case OPC_DMULT_G_2E
:
3769 case OPC_DMULT_G_2F
:
3770 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3772 case OPC_DMULTU_G_2E
:
3773 case OPC_DMULTU_G_2F
:
3774 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3779 TCGLabel
*l1
= gen_new_label();
3780 TCGLabel
*l2
= gen_new_label();
3781 TCGLabel
*l3
= gen_new_label();
3782 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3783 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3786 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3787 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3788 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3791 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3795 case OPC_DDIVU_G_2E
:
3796 case OPC_DDIVU_G_2F
:
3798 TCGLabel
*l1
= gen_new_label();
3799 TCGLabel
*l2
= gen_new_label();
3800 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3801 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3804 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3811 TCGLabel
*l1
= gen_new_label();
3812 TCGLabel
*l2
= gen_new_label();
3813 TCGLabel
*l3
= gen_new_label();
3814 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3815 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3816 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3821 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3825 case OPC_DMODU_G_2E
:
3826 case OPC_DMODU_G_2F
:
3828 TCGLabel
*l1
= gen_new_label();
3829 TCGLabel
*l2
= gen_new_label();
3830 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3831 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3834 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3845 /* Loongson multimedia instructions */
3846 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3848 uint32_t opc
, shift_max
;
3851 opc
= MASK_LMI(ctx
->opcode
);
3857 t0
= tcg_temp_local_new_i64();
3858 t1
= tcg_temp_local_new_i64();
3861 t0
= tcg_temp_new_i64();
3862 t1
= tcg_temp_new_i64();
3866 gen_load_fpr64(ctx
, t0
, rs
);
3867 gen_load_fpr64(ctx
, t1
, rt
);
#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);
    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);
    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);
    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);
    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);
    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);
    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);
    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);
    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);
    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR, or, or);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);
        tcg_gen_shl_i64(t0, t0, t1);
        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same. */
        tcg_gen_sar_i64(t0, t0, t1);
        /* We want to shift in zeros for SRL; zero-extend first. */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);
        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
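        /* The three ops above build a branch-free "oversized shift gives
           zero" mask: the setcond leaves 1 in t1 when the shift count was
           below shift_max and 0 otherwise, negating turns that into all-ones
           or all-zeros, and the final AND either keeps the shifted value or
           clears it. */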
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
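        /* The XOR/ANDC sequence above is the usual two's-complement overflow
           test for the addition: after it, the sign bit of t1 is set exactly
           when both addends had the same sign and the sum's sign differs, so
           the TCG_COND_GE branch skips the EXCP_OVERFLOW exception whenever
           no overflow occurred. */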
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field? */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, t0, rd);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs          */
        case OPC_TLTI:  /* r0 < 0           */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0  unsigned */
        case OPC_TNE:   /* rs != rs         */
        case OPC_TNEI:  /* r0 != 0          */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
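    /* Note the inverted conditions above: each brcond jumps past the trap
       when the opposite of the architectural trap condition holds (e.g. TEQ
       branches away on NE), so execution only falls through to the
       EXCP_TRAP exception when the instruction really should trap. */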
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    TranslationBlock *tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_exit_tb((uintptr_t)tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
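/* Direct TB chaining via goto_tb/exit_tb (with the jump slot index n encoded
   in the low bits of the TB pointer) is only used when the destination stays
   on the same guest page as the current TB and single-stepping is disabled;
   the other path (not fully preserved here) presumably just stores the new
   PC and exits to the main loop, raising the debug exception first when
   single-stepping is active. */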
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);
    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot handling. */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);
        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);
    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
            tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
            tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
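/* The EXT-style cases above extract msb + 1 bits of rs starting at bit lsb
   (shift right, then mask), while the INS-style cases deposit the low
   msb - lsb + 1 bits of rs into rt at position lsb and leave the remaining
   bits of rt untouched; the 32-bit forms sign-extend the result as usual. */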
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
    /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bshfl");
        generate_exception_end(ctx, EXCP_RI);
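/* The shift/mask pairs above implement the byte-shuffle family: swapping the
   two bytes of each halfword (8-bit shifts with 0x00FF00FF-style masks) for
   WSBH/DSBH, and on MIPS64 additionally exchanging halfwords within words
   and then the two 32-bit halves to realise DSHD. */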
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
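/* LSA/DLSA compute GPR[rd] = (GPR[rs] << (imm2 + 1)) + GPR[rt], i.e. a
   scaled-index address with a scale of 2, 4, 8 or 16; the 32-bit LSA form
   additionally sign-extends the result to keep it canonical on 64-bit
   targets. */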
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
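/* ALIGN/DALIGN pick a register-wide window out of the concatenation rt:rs,
   shifted by bp bytes: the 32-bit path forms the 64-bit pair with
   concat_tl_i64 and shifts right by 8 * (4 - bp), while the 64-bit path ors
   rt << 8*bp with rs >> 8*(8 - bp).  A byte position of zero degenerates to
   a plain copy of rt, handled at the top of the function. */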
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
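/* These small accessors centralise the width handling for CP0 state: the
   mfc0/mtc0 helpers move the low 32 bits of a field (sign-extending on the
   way out), while the mfhc0/mthc0 helpers read or rewrite only the upper
   half of a 64-bit field; the EntryLo variants use bit position 30 on
   MIPS64 and 32 elsewhere to match how EntryLo is stored internally. */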
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {
        goto mfhc0_read_zero;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto mfhc0_read_zero;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto mfhc0_read_zero;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            goto mfhc0_read_zero;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto mfhc0_read_zero;
        goto mfhc0_read_zero;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;
    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
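/* MFHC0/MTHC0 are only meaningful when large physical addresses are enabled
   (MIPS_HFLAG_ELPA): reads fall through to mfhc0_read_zero and writes are
   presumably dropped otherwise, and written values are masked with
   PAMask >> 36 so that only implemented upper physical-address bits can
   reach EntryLo or TagLo. */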
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);

#define CP0_CHECK(c)                   \
    do {                               \
        if (!(c)) {                    \
            goto cp0_unimplemented;    \
        }                              \
    } while (0)
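/* CP0_CHECK() guards the feature-dependent CP0 registers handled below
   (MT ASE, R6-only registers, KScratch, ...): when the required capability
   is absent it jumps to the cp0_unimplemented label at the end of the
   accessor, which logs the access and, for reads, supplies the default
   value produced by gen_mfc0_unimplemented(). */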
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS32);
4935 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4939 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4940 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4944 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4945 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4949 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4950 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4954 goto cp0_unimplemented
;
4960 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4961 gen_helper_mfc0_random(arg
, cpu_env
);
4965 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4970 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4971 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4975 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4976 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4980 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4981 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4985 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4986 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4990 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4991 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4992 rn
= "VPEScheFBack";
4995 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4996 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5000 goto cp0_unimplemented
;
5007 TCGv_i64 tmp
= tcg_temp_new_i64();
5008 tcg_gen_ld_i64(tmp
, cpu_env
,
5009 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5010 #if defined(TARGET_MIPS64)
5012 /* Move RI/XI fields to bits 31:30 */
5013 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5014 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5017 gen_move_low32(arg
, tmp
);
5018 tcg_temp_free_i64(tmp
);
5023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5024 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5028 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5029 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5033 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5034 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5038 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5039 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5043 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5044 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5049 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5053 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5054 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5058 goto cp0_unimplemented
;
5065 TCGv_i64 tmp
= tcg_temp_new_i64();
5066 tcg_gen_ld_i64(tmp
, cpu_env
,
5067 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5068 #if defined(TARGET_MIPS64)
5070 /* Move RI/XI fields to bits 31:30 */
5071 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5072 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5075 gen_move_low32(arg
, tmp
);
5076 tcg_temp_free_i64(tmp
);
5081 goto cp0_unimplemented
;
5087 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5088 tcg_gen_ext32s_tl(arg
, arg
);
5092 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5093 rn
= "ContextConfig";
5094 goto cp0_unimplemented
;
5097 CP0_CHECK(ctx
->ulri
);
5098 tcg_gen_ld32s_tl(arg
, cpu_env
,
5099 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5103 goto cp0_unimplemented
;
5109 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5113 check_insn(ctx
, ISA_MIPS32R2
);
5114 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5118 goto cp0_unimplemented
;
5124 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5128 check_insn(ctx
, ISA_MIPS32R2
);
5129 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5133 check_insn(ctx
, ISA_MIPS32R2
);
5134 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5138 check_insn(ctx
, ISA_MIPS32R2
);
5139 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5143 check_insn(ctx
, ISA_MIPS32R2
);
5144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5148 check_insn(ctx
, ISA_MIPS32R2
);
5149 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5153 goto cp0_unimplemented
;
5159 check_insn(ctx
, ISA_MIPS32R2
);
5160 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5164 goto cp0_unimplemented
;
5170 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5171 tcg_gen_ext32s_tl(arg
, arg
);
5176 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5181 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5185 goto cp0_unimplemented
;
5191 /* Mark as an IO operation because we read the time. */
5192 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5195 gen_helper_mfc0_count(arg
, cpu_env
);
5196 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5199 /* Break the TB to be able to take timer interrupts immediately
5200 after reading count. */
5201 ctx
->bstate
= BS_STOP
;
5204 /* 6,7 are implementation dependent */
5206 goto cp0_unimplemented
;
5212 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5213 tcg_gen_ext32s_tl(arg
, arg
);
5217 goto cp0_unimplemented
;
5223 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5226 /* 6,7 are implementation dependent */
5228 goto cp0_unimplemented
;
5234 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5238 check_insn(ctx
, ISA_MIPS32R2
);
5239 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5243 check_insn(ctx
, ISA_MIPS32R2
);
5244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5248 check_insn(ctx
, ISA_MIPS32R2
);
5249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5253 goto cp0_unimplemented
;
5259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5263 goto cp0_unimplemented
;
5269 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5270 tcg_gen_ext32s_tl(arg
, arg
);
5274 goto cp0_unimplemented
;
5280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5284 check_insn(ctx
, ISA_MIPS32R2
);
5285 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5289 goto cp0_unimplemented
;
5295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5311 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5315 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5318 /* 6,7 are implementation dependent */
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5328 goto cp0_unimplemented
;
5334 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5338 goto cp0_unimplemented
;
5344 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5348 goto cp0_unimplemented
;
5354 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5358 goto cp0_unimplemented
;
5364 #if defined(TARGET_MIPS64)
5365 check_insn(ctx
, ISA_MIPS3
);
5366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5367 tcg_gen_ext32s_tl(arg
, arg
);
5372 goto cp0_unimplemented
;
5376 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5377 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5384 goto cp0_unimplemented
;
5388 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5389 rn
= "'Diagnostic"; /* implementation dependent */
5394 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5398 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5399 rn
= "TraceControl";
5402 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5403 rn
= "TraceControl2";
5406 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5407 rn
= "UserTraceData";
5410 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5414 goto cp0_unimplemented
;
5421 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5422 tcg_gen_ext32s_tl(arg
, arg
);
5426 goto cp0_unimplemented
;
5432 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5433 rn
= "Performance0";
5436 // gen_helper_mfc0_performance1(arg);
5437 rn
= "Performance1";
5440 // gen_helper_mfc0_performance2(arg);
5441 rn
= "Performance2";
5444 // gen_helper_mfc0_performance3(arg);
5445 rn
= "Performance3";
5448 // gen_helper_mfc0_performance4(arg);
5449 rn
= "Performance4";
5452 // gen_helper_mfc0_performance5(arg);
5453 rn
= "Performance5";
5456 // gen_helper_mfc0_performance6(arg);
5457 rn
= "Performance6";
5460 // gen_helper_mfc0_performance7(arg);
5461 rn
= "Performance7";
5464 goto cp0_unimplemented
;
5468 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5474 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5478 goto cp0_unimplemented
;
5488 TCGv_i64 tmp
= tcg_temp_new_i64();
5489 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5490 gen_move_low32(arg
, tmp
);
5491 tcg_temp_free_i64(tmp
);
5499 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5503 goto cp0_unimplemented
;
5512 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5523 goto cp0_unimplemented
;
5529 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5530 tcg_gen_ext32s_tl(arg
, arg
);
5534 goto cp0_unimplemented
;
5541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5545 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5546 tcg_gen_ld_tl(arg
, cpu_env
,
5547 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5548 tcg_gen_ext32s_tl(arg
, arg
);
5552 goto cp0_unimplemented
;
5556 goto cp0_unimplemented
;
5558 (void)rn
; /* avoid a compiler warning */
5559 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5563 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5564 gen_mfc0_unimplemented(ctx
, arg
);
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS32);
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
5582 gen_helper_mtc0_index(cpu_env
, arg
);
5586 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5587 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5596 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5601 goto cp0_unimplemented
;
5611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5612 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5617 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5621 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5622 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5627 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5632 tcg_gen_st_tl(arg
, cpu_env
,
5633 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5638 tcg_gen_st_tl(arg
, cpu_env
,
5639 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5640 rn
= "VPEScheFBack";
5643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5644 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5648 goto cp0_unimplemented
;
5654 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5658 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5659 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5664 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5668 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5669 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5674 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5679 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5683 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5684 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5689 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5693 goto cp0_unimplemented
;
5699 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5703 goto cp0_unimplemented
;
5709 gen_helper_mtc0_context(cpu_env
, arg
);
5713 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5714 rn
= "ContextConfig";
5715 goto cp0_unimplemented
;
5718 CP0_CHECK(ctx
->ulri
);
5719 tcg_gen_st_tl(arg
, cpu_env
,
5720 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5724 goto cp0_unimplemented
;
5730 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5734 check_insn(ctx
, ISA_MIPS32R2
);
5735 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5737 ctx
->bstate
= BS_STOP
;
5740 goto cp0_unimplemented
;
5746 gen_helper_mtc0_wired(cpu_env
, arg
);
5750 check_insn(ctx
, ISA_MIPS32R2
);
5751 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5755 check_insn(ctx
, ISA_MIPS32R2
);
5756 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5760 check_insn(ctx
, ISA_MIPS32R2
);
5761 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5765 check_insn(ctx
, ISA_MIPS32R2
);
5766 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5770 check_insn(ctx
, ISA_MIPS32R2
);
5771 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5775 goto cp0_unimplemented
;
5781 check_insn(ctx
, ISA_MIPS32R2
);
5782 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5783 ctx
->bstate
= BS_STOP
;
5787 goto cp0_unimplemented
;
5805 goto cp0_unimplemented
;
5811 gen_helper_mtc0_count(cpu_env
, arg
);
5814 /* 6,7 are implementation dependent */
5816 goto cp0_unimplemented
;
5822 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5826 goto cp0_unimplemented
;
5832 gen_helper_mtc0_compare(cpu_env
, arg
);
5835 /* 6,7 are implementation dependent */
5837 goto cp0_unimplemented
;
5843 save_cpu_state(ctx
, 1);
5844 gen_helper_mtc0_status(cpu_env
, arg
);
5845 /* BS_STOP isn't good enough here, hflags may have changed. */
5846 gen_save_pc(ctx
->pc
+ 4);
5847 ctx
->bstate
= BS_EXCP
;
5851 check_insn(ctx
, ISA_MIPS32R2
);
5852 gen_helper_mtc0_intctl(cpu_env
, arg
);
5853 /* Stop translation as we may have switched the execution mode */
5854 ctx
->bstate
= BS_STOP
;
5858 check_insn(ctx
, ISA_MIPS32R2
);
5859 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5860 /* Stop translation as we may have switched the execution mode */
5861 ctx
->bstate
= BS_STOP
;
5865 check_insn(ctx
, ISA_MIPS32R2
);
5866 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5867 /* Stop translation as we may have switched the execution mode */
5868 ctx
->bstate
= BS_STOP
;
5872 goto cp0_unimplemented
;
5878 save_cpu_state(ctx
, 1);
5879 gen_helper_mtc0_cause(cpu_env
, arg
);
5883 goto cp0_unimplemented
;
5889 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5893 goto cp0_unimplemented
;
5903 check_insn(ctx
, ISA_MIPS32R2
);
5904 gen_helper_mtc0_ebase(cpu_env
, arg
);
5908 goto cp0_unimplemented
;
5914 gen_helper_mtc0_config0(cpu_env
, arg
);
5916 /* Stop translation as we may have switched the execution mode */
5917 ctx
->bstate
= BS_STOP
;
5920 /* ignored, read only */
5924 gen_helper_mtc0_config2(cpu_env
, arg
);
5926 /* Stop translation as we may have switched the execution mode */
5927 ctx
->bstate
= BS_STOP
;
5930 gen_helper_mtc0_config3(cpu_env
, arg
);
5932 /* Stop translation as we may have switched the execution mode */
5933 ctx
->bstate
= BS_STOP
;
5936 gen_helper_mtc0_config4(cpu_env
, arg
);
5938 ctx
->bstate
= BS_STOP
;
5941 gen_helper_mtc0_config5(cpu_env
, arg
);
5943 /* Stop translation as we may have switched the execution mode */
5944 ctx
->bstate
= BS_STOP
;
5946 /* 6,7 are implementation dependent */
5956 rn
= "Invalid config selector";
5957 goto cp0_unimplemented
;
5963 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5967 goto cp0_unimplemented
;
5973 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5977 goto cp0_unimplemented
;
5983 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5987 goto cp0_unimplemented
;
5993 #if defined(TARGET_MIPS64)
5994 check_insn(ctx
, ISA_MIPS3
);
5995 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6000 goto cp0_unimplemented
;
6004 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6005 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6008 gen_helper_mtc0_framemask(cpu_env
, arg
);
6012 goto cp0_unimplemented
;
6017 rn
= "Diagnostic"; /* implementation dependent */
6022 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6023 /* BS_STOP isn't good enough here, hflags may have changed. */
6024 gen_save_pc(ctx
->pc
+ 4);
6025 ctx
->bstate
= BS_EXCP
;
6029 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6030 rn
= "TraceControl";
6031 /* Stop translation as we may have switched the execution mode */
6032 ctx
->bstate
= BS_STOP
;
6035 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6036 rn
= "TraceControl2";
6037 /* Stop translation as we may have switched the execution mode */
6038 ctx
->bstate
= BS_STOP
;
6041 /* Stop translation as we may have switched the execution mode */
6042 ctx
->bstate
= BS_STOP
;
6043 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6044 rn
= "UserTraceData";
6045 /* Stop translation as we may have switched the execution mode */
6046 ctx
->bstate
= BS_STOP
;
6049 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6050 /* Stop translation as we may have switched the execution mode */
6051 ctx
->bstate
= BS_STOP
;
6055 goto cp0_unimplemented
;
6062 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6066 goto cp0_unimplemented
;
6072 gen_helper_mtc0_performance0(cpu_env
, arg
);
6073 rn
= "Performance0";
6076 // gen_helper_mtc0_performance1(arg);
6077 rn
= "Performance1";
6080 // gen_helper_mtc0_performance2(arg);
6081 rn
= "Performance2";
6084 // gen_helper_mtc0_performance3(arg);
6085 rn
= "Performance3";
6088 // gen_helper_mtc0_performance4(arg);
6089 rn
= "Performance4";
6092 // gen_helper_mtc0_performance5(arg);
6093 rn
= "Performance5";
6096 // gen_helper_mtc0_performance6(arg);
6097 rn
= "Performance6";
6100 // gen_helper_mtc0_performance7(arg);
6101 rn
= "Performance7";
6104 goto cp0_unimplemented
;
6118 goto cp0_unimplemented
;
6127 gen_helper_mtc0_taglo(cpu_env
, arg
);
6134 gen_helper_mtc0_datalo(cpu_env
, arg
);
6138 goto cp0_unimplemented
;
6147 gen_helper_mtc0_taghi(cpu_env
, arg
);
6154 gen_helper_mtc0_datahi(cpu_env
, arg
);
6159 goto cp0_unimplemented
;
6165 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6169 goto cp0_unimplemented
;
6176 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6180 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6181 tcg_gen_st_tl(arg
, cpu_env
,
6182 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6186 goto cp0_unimplemented
;
6188 /* Stop translation as we may have switched the execution mode */
6189 ctx
->bstate
= BS_STOP
;
6192 goto cp0_unimplemented
;
6194 (void)rn
; /* avoid a compiler warning */
6195 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6196 /* For simplicity assume that all writes can cause interrupts. */
6197 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6199 ctx
->bstate
= BS_STOP
;
6204 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS64);
6219 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6223 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6224 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6228 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6229 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6233 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6234 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6238 goto cp0_unimplemented
;
6244 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6245 gen_helper_mfc0_random(arg
, cpu_env
);
6249 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6250 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6255 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6265 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6269 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6270 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6274 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6275 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6276 rn
= "VPEScheFBack";
6279 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6284 goto cp0_unimplemented
;
6290 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6294 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6295 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6299 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6300 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6304 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6305 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6309 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6310 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6314 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6315 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6319 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6320 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6324 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6325 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6329 goto cp0_unimplemented
;
6335 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6339 goto cp0_unimplemented
;
6345 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6349 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6350 rn
= "ContextConfig";
6351 goto cp0_unimplemented
;
6354 CP0_CHECK(ctx
->ulri
);
6355 tcg_gen_ld_tl(arg
, cpu_env
,
6356 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6360 goto cp0_unimplemented
;
6366 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6370 check_insn(ctx
, ISA_MIPS32R2
);
6371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6375 goto cp0_unimplemented
;
6381 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6385 check_insn(ctx
, ISA_MIPS32R2
);
6386 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6390 check_insn(ctx
, ISA_MIPS32R2
);
6391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6395 check_insn(ctx
, ISA_MIPS32R2
);
6396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6400 check_insn(ctx
, ISA_MIPS32R2
);
6401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6405 check_insn(ctx
, ISA_MIPS32R2
);
6406 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6410 goto cp0_unimplemented
;
6416 check_insn(ctx
, ISA_MIPS32R2
);
6417 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6421 goto cp0_unimplemented
;
6427 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6432 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6437 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6441 goto cp0_unimplemented
;
6447 /* Mark as an IO operation because we read the time. */
6448 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6451 gen_helper_mfc0_count(arg
, cpu_env
);
6452 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6455 /* Break the TB to be able to take timer interrupts immediately
6456 after reading count. */
6457 ctx
->bstate
= BS_STOP
;
6460 /* 6,7 are implementation dependent */
6462 goto cp0_unimplemented
;
6468 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6472 goto cp0_unimplemented
;
6478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6481 /* 6,7 are implementation dependent */
6483 goto cp0_unimplemented
;
6489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6493 check_insn(ctx
, ISA_MIPS32R2
);
6494 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6498 check_insn(ctx
, ISA_MIPS32R2
);
6499 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6503 check_insn(ctx
, ISA_MIPS32R2
);
6504 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6508 goto cp0_unimplemented
;
6514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6518 goto cp0_unimplemented
;
6524 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6528 goto cp0_unimplemented
;
6534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6538 check_insn(ctx
, ISA_MIPS32R2
);
6539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6543 goto cp0_unimplemented
;
6549 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6561 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6569 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6572 /* 6,7 are implementation dependent */
6574 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6578 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6582 goto cp0_unimplemented
;
6588 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6592 goto cp0_unimplemented
;
6598 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6602 goto cp0_unimplemented
;
6608 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6612 goto cp0_unimplemented
;
6618 check_insn(ctx
, ISA_MIPS3
);
6619 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6623 goto cp0_unimplemented
;
6627 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6628 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6631 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6635 goto cp0_unimplemented
;
6639 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6640 rn
= "'Diagnostic"; /* implementation dependent */
6645 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6649 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6650 rn
= "TraceControl";
6653 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6654 rn
= "TraceControl2";
6657 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6658 rn
= "UserTraceData";
6661 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6665 goto cp0_unimplemented
;
6672 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6676 goto cp0_unimplemented
;
6682 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6683 rn
= "Performance0";
6686 // gen_helper_dmfc0_performance1(arg);
6687 rn
= "Performance1";
6690 // gen_helper_dmfc0_performance2(arg);
6691 rn
= "Performance2";
6694 // gen_helper_dmfc0_performance3(arg);
6695 rn
= "Performance3";
6698 // gen_helper_dmfc0_performance4(arg);
6699 rn
= "Performance4";
6702 // gen_helper_dmfc0_performance5(arg);
6703 rn
= "Performance5";
6706 // gen_helper_dmfc0_performance6(arg);
6707 rn
= "Performance6";
6710 // gen_helper_dmfc0_performance7(arg);
6711 rn
= "Performance7";
6714 goto cp0_unimplemented
;
6718 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6725 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6729 goto cp0_unimplemented
;
6738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6745 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6749 goto cp0_unimplemented
;
6758 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6765 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6769 goto cp0_unimplemented
;
6775 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6779 goto cp0_unimplemented
;
6786 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6790 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6791 tcg_gen_ld_tl(arg
, cpu_env
,
6792 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6796 goto cp0_unimplemented
;
6800 goto cp0_unimplemented
;
6802 (void)rn
; /* avoid a compiler warning */
6803 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6807 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6808 gen_mfc0_unimplemented(ctx
, arg
);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
        check_insn(ctx, ISA_MIPS64);
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
6826 gen_helper_mtc0_index(cpu_env
, arg
);
6830 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6831 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6835 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6840 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6845 goto cp0_unimplemented
;
6855 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6856 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6860 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6861 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6865 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6866 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6870 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6871 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6875 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6876 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6880 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6881 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6882 rn
= "VPEScheFBack";
6885 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6886 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6890 goto cp0_unimplemented
;
6896 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6900 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6901 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6905 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6906 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6910 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6911 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6915 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6916 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6920 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6921 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6925 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6926 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6930 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6931 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6935 goto cp0_unimplemented
;
6941 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6945 goto cp0_unimplemented
;
6951 gen_helper_mtc0_context(cpu_env
, arg
);
6955 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6956 rn
= "ContextConfig";
6957 goto cp0_unimplemented
;
6960 CP0_CHECK(ctx
->ulri
);
6961 tcg_gen_st_tl(arg
, cpu_env
,
6962 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6966 goto cp0_unimplemented
;
6972 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6976 check_insn(ctx
, ISA_MIPS32R2
);
6977 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6981 goto cp0_unimplemented
;
6987 gen_helper_mtc0_wired(cpu_env
, arg
);
6991 check_insn(ctx
, ISA_MIPS32R2
);
6992 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6996 check_insn(ctx
, ISA_MIPS32R2
);
6997 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7001 check_insn(ctx
, ISA_MIPS32R2
);
7002 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7006 check_insn(ctx
, ISA_MIPS32R2
);
7007 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7011 check_insn(ctx
, ISA_MIPS32R2
);
7012 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7016 goto cp0_unimplemented
;
7022 check_insn(ctx
, ISA_MIPS32R2
);
7023 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7024 ctx
->bstate
= BS_STOP
;
7028 goto cp0_unimplemented
;
7046 goto cp0_unimplemented
;
7052 gen_helper_mtc0_count(cpu_env
, arg
);
7055 /* 6,7 are implementation dependent */
7057 goto cp0_unimplemented
;
7059 /* Stop translation as we may have switched the execution mode */
7060 ctx
->bstate
= BS_STOP
;
7065 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7069 goto cp0_unimplemented
;
7075 gen_helper_mtc0_compare(cpu_env
, arg
);
7078 /* 6,7 are implementation dependent */
7080 goto cp0_unimplemented
;
7082 /* Stop translation as we may have switched the execution mode */
7083 ctx
->bstate
= BS_STOP
;
7088 save_cpu_state(ctx
, 1);
7089 gen_helper_mtc0_status(cpu_env
, arg
);
7090 /* BS_STOP isn't good enough here, hflags may have changed. */
7091 gen_save_pc(ctx
->pc
+ 4);
7092 ctx
->bstate
= BS_EXCP
;
7096 check_insn(ctx
, ISA_MIPS32R2
);
7097 gen_helper_mtc0_intctl(cpu_env
, arg
);
7098 /* Stop translation as we may have switched the execution mode */
7099 ctx
->bstate
= BS_STOP
;
7103 check_insn(ctx
, ISA_MIPS32R2
);
7104 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7105 /* Stop translation as we may have switched the execution mode */
7106 ctx
->bstate
= BS_STOP
;
7110 check_insn(ctx
, ISA_MIPS32R2
);
7111 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7112 /* Stop translation as we may have switched the execution mode */
7113 ctx
->bstate
= BS_STOP
;
7117 goto cp0_unimplemented
;
            save_cpu_state(ctx, 1);
            /* Mark as an IO operation because we may trigger a software
               interrupt. */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_cause(cpu_env, arg);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Stop translation as we may have triggered an interrupt. */
            ctx->bstate = BS_STOP;
7138 goto cp0_unimplemented
;
7144 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7148 goto cp0_unimplemented
;
7158 check_insn(ctx
, ISA_MIPS32R2
);
7159 gen_helper_mtc0_ebase(cpu_env
, arg
);
7163 goto cp0_unimplemented
;
7169 gen_helper_mtc0_config0(cpu_env
, arg
);
7171 /* Stop translation as we may have switched the execution mode */
7172 ctx
->bstate
= BS_STOP
;
7175 /* ignored, read only */
7179 gen_helper_mtc0_config2(cpu_env
, arg
);
7181 /* Stop translation as we may have switched the execution mode */
7182 ctx
->bstate
= BS_STOP
;
7185 gen_helper_mtc0_config3(cpu_env
, arg
);
7187 /* Stop translation as we may have switched the execution mode */
7188 ctx
->bstate
= BS_STOP
;
7191 /* currently ignored */
7195 gen_helper_mtc0_config5(cpu_env
, arg
);
7197 /* Stop translation as we may have switched the execution mode */
7198 ctx
->bstate
= BS_STOP
;
7200 /* 6,7 are implementation dependent */
7202 rn
= "Invalid config selector";
7203 goto cp0_unimplemented
;
7209 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7213 goto cp0_unimplemented
;
7219 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7223 goto cp0_unimplemented
;
7229 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7233 goto cp0_unimplemented
;
7239 check_insn(ctx
, ISA_MIPS3
);
7240 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7244 goto cp0_unimplemented
;
7248 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7249 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7252 gen_helper_mtc0_framemask(cpu_env
, arg
);
7256 goto cp0_unimplemented
;
7261 rn
= "Diagnostic"; /* implementation dependent */
7266 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7267 /* BS_STOP isn't good enough here, hflags may have changed. */
7268 gen_save_pc(ctx
->pc
+ 4);
7269 ctx
->bstate
= BS_EXCP
;
7273 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7274 /* Stop translation as we may have switched the execution mode */
7275 ctx
->bstate
= BS_STOP
;
7276 rn
= "TraceControl";
7279 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7280 /* Stop translation as we may have switched the execution mode */
7281 ctx
->bstate
= BS_STOP
;
7282 rn
= "TraceControl2";
7285 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7286 /* Stop translation as we may have switched the execution mode */
7287 ctx
->bstate
= BS_STOP
;
7288 rn
= "UserTraceData";
7291 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7292 /* Stop translation as we may have switched the execution mode */
7293 ctx
->bstate
= BS_STOP
;
7297 goto cp0_unimplemented
;
7304 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7308 goto cp0_unimplemented
;
7314 gen_helper_mtc0_performance0(cpu_env
, arg
);
7315 rn
= "Performance0";
7318 // gen_helper_mtc0_performance1(cpu_env, arg);
7319 rn
= "Performance1";
7322 // gen_helper_mtc0_performance2(cpu_env, arg);
7323 rn
= "Performance2";
7326 // gen_helper_mtc0_performance3(cpu_env, arg);
7327 rn
= "Performance3";
7330 // gen_helper_mtc0_performance4(cpu_env, arg);
7331 rn
= "Performance4";
7334 // gen_helper_mtc0_performance5(cpu_env, arg);
7335 rn
= "Performance5";
7338 // gen_helper_mtc0_performance6(cpu_env, arg);
7339 rn
= "Performance6";
7342 // gen_helper_mtc0_performance7(cpu_env, arg);
7343 rn
= "Performance7";
7346 goto cp0_unimplemented
;
7360 goto cp0_unimplemented
;
7369 gen_helper_mtc0_taglo(cpu_env
, arg
);
7376 gen_helper_mtc0_datalo(cpu_env
, arg
);
7380 goto cp0_unimplemented
;
7389 gen_helper_mtc0_taghi(cpu_env
, arg
);
7396 gen_helper_mtc0_datahi(cpu_env
, arg
);
7401 goto cp0_unimplemented
;
7407 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7411 goto cp0_unimplemented
;
7418 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7422 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7423 tcg_gen_st_tl(arg
, cpu_env
,
7424 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7428 goto cp0_unimplemented
;
7430 /* Stop translation as we may have switched the execution mode */
7431 ctx
->bstate
= BS_STOP
;
7434 goto cp0_unimplemented
;
7436 (void)rn
; /* avoid a compiler warning */
7437 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7438 /* For simplicity assume that all writes can cause interrupts. */
7439 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7441 ctx
->bstate
= BS_STOP
;
7446 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7448 #endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
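    /* When the target TC selected by VPEControl.TargTC is not accessible
       from this VPE (bound to a different VPE without MVP, or numbered above
       MVPConf0.PTC), the MFTR result is simply forced to all-ones instead of
       reading the other thread context. */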
7468 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7471 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7481 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7484 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7487 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7490 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7493 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7496 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7499 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7502 gen_mfc0(ctx
, t0
, rt
, sel
);
7509 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7512 gen_mfc0(ctx
, t0
, rt
, sel
);
7518 gen_helper_mftc0_status(t0
, cpu_env
);
7521 gen_mfc0(ctx
, t0
, rt
, sel
);
7527 gen_helper_mftc0_cause(t0
, cpu_env
);
7537 gen_helper_mftc0_epc(t0
, cpu_env
);
7547 gen_helper_mftc0_ebase(t0
, cpu_env
);
7557 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7567 gen_helper_mftc0_debug(t0
, cpu_env
);
7570 gen_mfc0(ctx
, t0
, rt
, sel
);
7575 gen_mfc0(ctx
, t0
, rt
, sel
);
7577 } else switch (sel
) {
7578 /* GPR registers. */
7580 gen_helper_1e0i(mftgpr
, t0
, rt
);
7582 /* Auxiliary CPU registers */
7586 gen_helper_1e0i(mftlo
, t0
, 0);
7589 gen_helper_1e0i(mfthi
, t0
, 0);
7592 gen_helper_1e0i(mftacx
, t0
, 0);
7595 gen_helper_1e0i(mftlo
, t0
, 1);
7598 gen_helper_1e0i(mfthi
, t0
, 1);
7601 gen_helper_1e0i(mftacx
, t0
, 1);
7604 gen_helper_1e0i(mftlo
, t0
, 2);
7607 gen_helper_1e0i(mfthi
, t0
, 2);
7610 gen_helper_1e0i(mftacx
, t0
, 2);
7613 gen_helper_1e0i(mftlo
, t0
, 3);
7616 gen_helper_1e0i(mfthi
, t0
, 3);
7619 gen_helper_1e0i(mftacx
, t0
, 3);
7622 gen_helper_mftdsp(t0
, cpu_env
);
7628 /* Floating point (COP1). */
7630 /* XXX: For now we support only a single FPU context. */
7632 TCGv_i32 fp0
= tcg_temp_new_i32();
7634 gen_load_fpr32(ctx
, fp0
, rt
);
7635 tcg_gen_ext_i32_tl(t0
, fp0
);
7636 tcg_temp_free_i32(fp0
);
7638 TCGv_i32 fp0
= tcg_temp_new_i32();
7640 gen_load_fpr32h(ctx
, fp0
, rt
);
7641 tcg_gen_ext_i32_tl(t0
, fp0
);
7642 tcg_temp_free_i32(fp0
);
7646 /* XXX: For now we support only a single FPU context. */
7647 gen_helper_1e0i(cfc1
, t0
, rt
);
7649 /* COP2: Not implemented. */
7656 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7657 gen_store_gpr(t0
, rd
);
7663 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7664 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();
    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
7686 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7689 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7699 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7702 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7705 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7708 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7711 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7714 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7717 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7720 gen_mtc0(ctx
, t0
, rd
, sel
);
7727 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7730 gen_mtc0(ctx
, t0
, rd
, sel
);
7736 gen_helper_mttc0_status(cpu_env
, t0
);
7739 gen_mtc0(ctx
, t0
, rd
, sel
);
7745 gen_helper_mttc0_cause(cpu_env
, t0
);
7755 gen_helper_mttc0_ebase(cpu_env
, t0
);
7765 gen_helper_mttc0_debug(cpu_env
, t0
);
7768 gen_mtc0(ctx
, t0
, rd
, sel
);
7773 gen_mtc0(ctx
, t0
, rd
, sel
);
7775 } else switch (sel
) {
7776 /* GPR registers. */
7778 gen_helper_0e1i(mttgpr
, t0
, rd
);
7780 /* Auxiliary CPU registers */
7784 gen_helper_0e1i(mttlo
, t0
, 0);
7787 gen_helper_0e1i(mtthi
, t0
, 0);
7790 gen_helper_0e1i(mttacx
, t0
, 0);
7793 gen_helper_0e1i(mttlo
, t0
, 1);
7796 gen_helper_0e1i(mtthi
, t0
, 1);
7799 gen_helper_0e1i(mttacx
, t0
, 1);
7802 gen_helper_0e1i(mttlo
, t0
, 2);
7805 gen_helper_0e1i(mtthi
, t0
, 2);
7808 gen_helper_0e1i(mttacx
, t0
, 2);
7811 gen_helper_0e1i(mttlo
, t0
, 3);
7814 gen_helper_0e1i(mtthi
, t0
, 3);
7817 gen_helper_0e1i(mttacx
, t0
, 3);
7820 gen_helper_mttdsp(cpu_env
, t0
);
7826 /* Floating point (COP1). */
7828 /* XXX: For now we support only a single FPU context. */
7830 TCGv_i32 fp0
= tcg_temp_new_i32();
7832 tcg_gen_trunc_tl_i32(fp0
, t0
);
7833 gen_store_fpr32(ctx
, fp0
, rd
);
7834 tcg_temp_free_i32(fp0
);
7836 TCGv_i32 fp0
= tcg_temp_new_i32();
7838 tcg_gen_trunc_tl_i32(fp0
, t0
);
7839 gen_store_fpr32h(ctx
, fp0
, rd
);
7840 tcg_temp_free_i32(fp0
);
7844 /* XXX: For now we support only a single FPU context. */
7846 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7848 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7849 tcg_temp_free_i32(fs_tmp
);
7851 /* Stop translation as we may have changed hflags */
7852 ctx
->bstate
= BS_STOP
;
7854 /* COP2: Not implemented. */
7861 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7867 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7868 generate_exception_end(ctx
, EXCP_RI
);
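/*
 * MFTR/MTTR (above) move a value between the current TC and the TC selected
 * by CP0_VPEControl.TargTC.  The guards at the top of each function make the
 * access fail gracefully when the target TC sits on another VPE without MVP
 * privilege, or when TargTC exceeds MVPConf0.PTC: reads then return -1 and
 * writes are dropped.
 */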
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";
    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();
    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
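/*
 * Note that gen_compute_branch1 only evaluates the FP condition code(s) from
 * fpu_fcr31 into the global 'bcond' and records the target in ctx->btarget;
 * the branch itself is emitted once the delay-slot instruction has been
 * translated, driven by the MIPS_HFLAG_BC/MIPS_HFLAG_BL and BDS32 hflags set
 * here.
 */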
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
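/*
 * FOP(func, fmt) mirrors the COP1 instruction layout: 'fmt' lands in the rs
 * field (bits 25..21) and 'func' in the function field (bits 5..0), so e.g.
 * FOP(0, FMT_S) is ADD.S.  Several mnemonics intentionally share an encoding
 * (MIN vs RECIP2, MINA vs RECIP1, MAX vs RSQRT1, MAXA vs RSQRT2); which one
 * is decoded depends on whether the CPU implements Release 6.
 */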
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        TCGv_i32 fs_tmp = tcg_const_i32(fs);
        gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
        tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(ctx, fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(ctx, fp0, fs);
        tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
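/*
 * gen_sel_s/gen_sel_d implement the Release 6 SEL.fmt, SELEQZ.fmt and
 * SELNEZ.fmt operations: the result is chosen with a movcond on bit 0 of the
 * condition operand (fd for SEL, ft for SELEQZ/SELNEZ), hence the andi by 1
 * before each movcond; no FPU condition codes are involved.
 */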
8680 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8681 int ft
, int fs
, int fd
, int cc
)
8683 uint32_t func
= ctx
->opcode
& 0x3f;
8687 TCGv_i32 fp0
= tcg_temp_new_i32();
8688 TCGv_i32 fp1
= tcg_temp_new_i32();
8690 gen_load_fpr32(ctx
, fp0
, fs
);
8691 gen_load_fpr32(ctx
, fp1
, ft
);
8692 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8693 tcg_temp_free_i32(fp1
);
8694 gen_store_fpr32(ctx
, fp0
, fd
);
8695 tcg_temp_free_i32(fp0
);
8700 TCGv_i32 fp0
= tcg_temp_new_i32();
8701 TCGv_i32 fp1
= tcg_temp_new_i32();
8703 gen_load_fpr32(ctx
, fp0
, fs
);
8704 gen_load_fpr32(ctx
, fp1
, ft
);
8705 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8706 tcg_temp_free_i32(fp1
);
8707 gen_store_fpr32(ctx
, fp0
, fd
);
8708 tcg_temp_free_i32(fp0
);
8713 TCGv_i32 fp0
= tcg_temp_new_i32();
8714 TCGv_i32 fp1
= tcg_temp_new_i32();
8716 gen_load_fpr32(ctx
, fp0
, fs
);
8717 gen_load_fpr32(ctx
, fp1
, ft
);
8718 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8719 tcg_temp_free_i32(fp1
);
8720 gen_store_fpr32(ctx
, fp0
, fd
);
8721 tcg_temp_free_i32(fp0
);
8726 TCGv_i32 fp0
= tcg_temp_new_i32();
8727 TCGv_i32 fp1
= tcg_temp_new_i32();
8729 gen_load_fpr32(ctx
, fp0
, fs
);
8730 gen_load_fpr32(ctx
, fp1
, ft
);
8731 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8732 tcg_temp_free_i32(fp1
);
8733 gen_store_fpr32(ctx
, fp0
, fd
);
8734 tcg_temp_free_i32(fp0
);
8739 TCGv_i32 fp0
= tcg_temp_new_i32();
8741 gen_load_fpr32(ctx
, fp0
, fs
);
8742 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8743 gen_store_fpr32(ctx
, fp0
, fd
);
8744 tcg_temp_free_i32(fp0
);
8749 TCGv_i32 fp0
= tcg_temp_new_i32();
8751 gen_load_fpr32(ctx
, fp0
, fs
);
8752 gen_helper_float_abs_s(fp0
, fp0
);
8753 gen_store_fpr32(ctx
, fp0
, fd
);
8754 tcg_temp_free_i32(fp0
);
8759 TCGv_i32 fp0
= tcg_temp_new_i32();
8761 gen_load_fpr32(ctx
, fp0
, fs
);
8762 gen_store_fpr32(ctx
, fp0
, fd
);
8763 tcg_temp_free_i32(fp0
);
8768 TCGv_i32 fp0
= tcg_temp_new_i32();
8770 gen_load_fpr32(ctx
, fp0
, fs
);
8771 gen_helper_float_chs_s(fp0
, fp0
);
8772 gen_store_fpr32(ctx
, fp0
, fd
);
8773 tcg_temp_free_i32(fp0
);
8777 check_cp1_64bitmode(ctx
);
8779 TCGv_i32 fp32
= tcg_temp_new_i32();
8780 TCGv_i64 fp64
= tcg_temp_new_i64();
8782 gen_load_fpr32(ctx
, fp32
, fs
);
8783 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8784 tcg_temp_free_i32(fp32
);
8785 gen_store_fpr64(ctx
, fp64
, fd
);
8786 tcg_temp_free_i64(fp64
);
8790 check_cp1_64bitmode(ctx
);
8792 TCGv_i32 fp32
= tcg_temp_new_i32();
8793 TCGv_i64 fp64
= tcg_temp_new_i64();
8795 gen_load_fpr32(ctx
, fp32
, fs
);
8796 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8797 tcg_temp_free_i32(fp32
);
8798 gen_store_fpr64(ctx
, fp64
, fd
);
8799 tcg_temp_free_i64(fp64
);
8803 check_cp1_64bitmode(ctx
);
8805 TCGv_i32 fp32
= tcg_temp_new_i32();
8806 TCGv_i64 fp64
= tcg_temp_new_i64();
8808 gen_load_fpr32(ctx
, fp32
, fs
);
8809 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8810 tcg_temp_free_i32(fp32
);
8811 gen_store_fpr64(ctx
, fp64
, fd
);
8812 tcg_temp_free_i64(fp64
);
8816 check_cp1_64bitmode(ctx
);
8818 TCGv_i32 fp32
= tcg_temp_new_i32();
8819 TCGv_i64 fp64
= tcg_temp_new_i64();
8821 gen_load_fpr32(ctx
, fp32
, fs
);
8822 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8823 tcg_temp_free_i32(fp32
);
8824 gen_store_fpr64(ctx
, fp64
, fd
);
8825 tcg_temp_free_i64(fp64
);
8830 TCGv_i32 fp0
= tcg_temp_new_i32();
8832 gen_load_fpr32(ctx
, fp0
, fs
);
8833 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8834 gen_store_fpr32(ctx
, fp0
, fd
);
8835 tcg_temp_free_i32(fp0
);
8840 TCGv_i32 fp0
= tcg_temp_new_i32();
8842 gen_load_fpr32(ctx
, fp0
, fs
);
8843 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8844 gen_store_fpr32(ctx
, fp0
, fd
);
8845 tcg_temp_free_i32(fp0
);
8850 TCGv_i32 fp0
= tcg_temp_new_i32();
8852 gen_load_fpr32(ctx
, fp0
, fs
);
8853 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8854 gen_store_fpr32(ctx
, fp0
, fd
);
8855 tcg_temp_free_i32(fp0
);
8860 TCGv_i32 fp0
= tcg_temp_new_i32();
8862 gen_load_fpr32(ctx
, fp0
, fs
);
8863 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8864 gen_store_fpr32(ctx
, fp0
, fd
);
8865 tcg_temp_free_i32(fp0
);
8869 check_insn(ctx
, ISA_MIPS32R6
);
8870 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8873 check_insn(ctx
, ISA_MIPS32R6
);
8874 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8877 check_insn(ctx
, ISA_MIPS32R6
);
8878 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8881 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8882 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8885 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8887 TCGLabel
*l1
= gen_new_label();
8891 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8893 fp0
= tcg_temp_new_i32();
8894 gen_load_fpr32(ctx
, fp0
, fs
);
8895 gen_store_fpr32(ctx
, fp0
, fd
);
8896 tcg_temp_free_i32(fp0
);
8901 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8903 TCGLabel
*l1
= gen_new_label();
8907 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8908 fp0
= tcg_temp_new_i32();
8909 gen_load_fpr32(ctx
, fp0
, fs
);
8910 gen_store_fpr32(ctx
, fp0
, fd
);
8911 tcg_temp_free_i32(fp0
);
8918 TCGv_i32 fp0
= tcg_temp_new_i32();
8920 gen_load_fpr32(ctx
, fp0
, fs
);
8921 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8922 gen_store_fpr32(ctx
, fp0
, fd
);
8923 tcg_temp_free_i32(fp0
);
8928 TCGv_i32 fp0
= tcg_temp_new_i32();
8930 gen_load_fpr32(ctx
, fp0
, fs
);
8931 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8932 gen_store_fpr32(ctx
, fp0
, fd
);
8933 tcg_temp_free_i32(fp0
);
8937 check_insn(ctx
, ISA_MIPS32R6
);
8939 TCGv_i32 fp0
= tcg_temp_new_i32();
8940 TCGv_i32 fp1
= tcg_temp_new_i32();
8941 TCGv_i32 fp2
= tcg_temp_new_i32();
8942 gen_load_fpr32(ctx
, fp0
, fs
);
8943 gen_load_fpr32(ctx
, fp1
, ft
);
8944 gen_load_fpr32(ctx
, fp2
, fd
);
8945 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8946 gen_store_fpr32(ctx
, fp2
, fd
);
8947 tcg_temp_free_i32(fp2
);
8948 tcg_temp_free_i32(fp1
);
8949 tcg_temp_free_i32(fp0
);
8953 check_insn(ctx
, ISA_MIPS32R6
);
8955 TCGv_i32 fp0
= tcg_temp_new_i32();
8956 TCGv_i32 fp1
= tcg_temp_new_i32();
8957 TCGv_i32 fp2
= tcg_temp_new_i32();
8958 gen_load_fpr32(ctx
, fp0
, fs
);
8959 gen_load_fpr32(ctx
, fp1
, ft
);
8960 gen_load_fpr32(ctx
, fp2
, fd
);
8961 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8962 gen_store_fpr32(ctx
, fp2
, fd
);
8963 tcg_temp_free_i32(fp2
);
8964 tcg_temp_free_i32(fp1
);
8965 tcg_temp_free_i32(fp0
);
8969 check_insn(ctx
, ISA_MIPS32R6
);
8971 TCGv_i32 fp0
= tcg_temp_new_i32();
8972 gen_load_fpr32(ctx
, fp0
, fs
);
8973 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
8974 gen_store_fpr32(ctx
, fp0
, fd
);
8975 tcg_temp_free_i32(fp0
);
8979 check_insn(ctx
, ISA_MIPS32R6
);
8981 TCGv_i32 fp0
= tcg_temp_new_i32();
8982 gen_load_fpr32(ctx
, fp0
, fs
);
8983 gen_helper_float_class_s(fp0
, fp0
);
8984 gen_store_fpr32(ctx
, fp0
, fd
);
8985 tcg_temp_free_i32(fp0
);
8988 case OPC_MIN_S
: /* OPC_RECIP2_S */
8989 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
8991 TCGv_i32 fp0
= tcg_temp_new_i32();
8992 TCGv_i32 fp1
= tcg_temp_new_i32();
8993 TCGv_i32 fp2
= tcg_temp_new_i32();
8994 gen_load_fpr32(ctx
, fp0
, fs
);
8995 gen_load_fpr32(ctx
, fp1
, ft
);
8996 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
8997 gen_store_fpr32(ctx
, fp2
, fd
);
8998 tcg_temp_free_i32(fp2
);
8999 tcg_temp_free_i32(fp1
);
9000 tcg_temp_free_i32(fp0
);
9003 check_cp1_64bitmode(ctx
);
9005 TCGv_i32 fp0
= tcg_temp_new_i32();
9006 TCGv_i32 fp1
= tcg_temp_new_i32();
9008 gen_load_fpr32(ctx
, fp0
, fs
);
9009 gen_load_fpr32(ctx
, fp1
, ft
);
9010 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9011 tcg_temp_free_i32(fp1
);
9012 gen_store_fpr32(ctx
, fp0
, fd
);
9013 tcg_temp_free_i32(fp0
);
9017 case OPC_MINA_S
: /* OPC_RECIP1_S */
9018 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9020 TCGv_i32 fp0
= tcg_temp_new_i32();
9021 TCGv_i32 fp1
= tcg_temp_new_i32();
9022 TCGv_i32 fp2
= tcg_temp_new_i32();
9023 gen_load_fpr32(ctx
, fp0
, fs
);
9024 gen_load_fpr32(ctx
, fp1
, ft
);
9025 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9026 gen_store_fpr32(ctx
, fp2
, fd
);
9027 tcg_temp_free_i32(fp2
);
9028 tcg_temp_free_i32(fp1
);
9029 tcg_temp_free_i32(fp0
);
9032 check_cp1_64bitmode(ctx
);
9034 TCGv_i32 fp0
= tcg_temp_new_i32();
9036 gen_load_fpr32(ctx
, fp0
, fs
);
9037 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9038 gen_store_fpr32(ctx
, fp0
, fd
);
9039 tcg_temp_free_i32(fp0
);
9043 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9044 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9046 TCGv_i32 fp0
= tcg_temp_new_i32();
9047 TCGv_i32 fp1
= tcg_temp_new_i32();
9048 gen_load_fpr32(ctx
, fp0
, fs
);
9049 gen_load_fpr32(ctx
, fp1
, ft
);
9050 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9051 gen_store_fpr32(ctx
, fp1
, fd
);
9052 tcg_temp_free_i32(fp1
);
9053 tcg_temp_free_i32(fp0
);
9056 check_cp1_64bitmode(ctx
);
9058 TCGv_i32 fp0
= tcg_temp_new_i32();
9060 gen_load_fpr32(ctx
, fp0
, fs
);
9061 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9062 gen_store_fpr32(ctx
, fp0
, fd
);
9063 tcg_temp_free_i32(fp0
);
9067 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9068 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9070 TCGv_i32 fp0
= tcg_temp_new_i32();
9071 TCGv_i32 fp1
= tcg_temp_new_i32();
9072 gen_load_fpr32(ctx
, fp0
, fs
);
9073 gen_load_fpr32(ctx
, fp1
, ft
);
9074 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9075 gen_store_fpr32(ctx
, fp1
, fd
);
9076 tcg_temp_free_i32(fp1
);
9077 tcg_temp_free_i32(fp0
);
9080 check_cp1_64bitmode(ctx
);
9082 TCGv_i32 fp0
= tcg_temp_new_i32();
9083 TCGv_i32 fp1
= tcg_temp_new_i32();
9085 gen_load_fpr32(ctx
, fp0
, fs
);
9086 gen_load_fpr32(ctx
, fp1
, ft
);
9087 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9088 tcg_temp_free_i32(fp1
);
9089 gen_store_fpr32(ctx
, fp0
, fd
);
9090 tcg_temp_free_i32(fp0
);
9095 check_cp1_registers(ctx
, fd
);
9097 TCGv_i32 fp32
= tcg_temp_new_i32();
9098 TCGv_i64 fp64
= tcg_temp_new_i64();
9100 gen_load_fpr32(ctx
, fp32
, fs
);
9101 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9102 tcg_temp_free_i32(fp32
);
9103 gen_store_fpr64(ctx
, fp64
, fd
);
9104 tcg_temp_free_i64(fp64
);
9109 TCGv_i32 fp0
= tcg_temp_new_i32();
9111 gen_load_fpr32(ctx
, fp0
, fs
);
9112 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9113 gen_store_fpr32(ctx
, fp0
, fd
);
9114 tcg_temp_free_i32(fp0
);
9118 check_cp1_64bitmode(ctx
);
9120 TCGv_i32 fp32
= tcg_temp_new_i32();
9121 TCGv_i64 fp64
= tcg_temp_new_i64();
9123 gen_load_fpr32(ctx
, fp32
, fs
);
9124 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9125 tcg_temp_free_i32(fp32
);
9126 gen_store_fpr64(ctx
, fp64
, fd
);
9127 tcg_temp_free_i64(fp64
);
9133 TCGv_i64 fp64
= tcg_temp_new_i64();
9134 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9135 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9137 gen_load_fpr32(ctx
, fp32_0
, fs
);
9138 gen_load_fpr32(ctx
, fp32_1
, ft
);
9139 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9140 tcg_temp_free_i32(fp32_1
);
9141 tcg_temp_free_i32(fp32_0
);
9142 gen_store_fpr64(ctx
, fp64
, fd
);
9143 tcg_temp_free_i64(fp64
);
9155 case OPC_CMP_NGLE_S
:
9162 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9163 if (ctx
->opcode
& (1 << 6)) {
9164 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9166 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9170 check_cp1_registers(ctx
, fs
| ft
| fd
);
9172 TCGv_i64 fp0
= tcg_temp_new_i64();
9173 TCGv_i64 fp1
= tcg_temp_new_i64();
9175 gen_load_fpr64(ctx
, fp0
, fs
);
9176 gen_load_fpr64(ctx
, fp1
, ft
);
9177 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9178 tcg_temp_free_i64(fp1
);
9179 gen_store_fpr64(ctx
, fp0
, fd
);
9180 tcg_temp_free_i64(fp0
);
9184 check_cp1_registers(ctx
, fs
| ft
| fd
);
9186 TCGv_i64 fp0
= tcg_temp_new_i64();
9187 TCGv_i64 fp1
= tcg_temp_new_i64();
9189 gen_load_fpr64(ctx
, fp0
, fs
);
9190 gen_load_fpr64(ctx
, fp1
, ft
);
9191 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9192 tcg_temp_free_i64(fp1
);
9193 gen_store_fpr64(ctx
, fp0
, fd
);
9194 tcg_temp_free_i64(fp0
);
9198 check_cp1_registers(ctx
, fs
| ft
| fd
);
9200 TCGv_i64 fp0
= tcg_temp_new_i64();
9201 TCGv_i64 fp1
= tcg_temp_new_i64();
9203 gen_load_fpr64(ctx
, fp0
, fs
);
9204 gen_load_fpr64(ctx
, fp1
, ft
);
9205 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9206 tcg_temp_free_i64(fp1
);
9207 gen_store_fpr64(ctx
, fp0
, fd
);
9208 tcg_temp_free_i64(fp0
);
9212 check_cp1_registers(ctx
, fs
| ft
| fd
);
9214 TCGv_i64 fp0
= tcg_temp_new_i64();
9215 TCGv_i64 fp1
= tcg_temp_new_i64();
9217 gen_load_fpr64(ctx
, fp0
, fs
);
9218 gen_load_fpr64(ctx
, fp1
, ft
);
9219 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9220 tcg_temp_free_i64(fp1
);
9221 gen_store_fpr64(ctx
, fp0
, fd
);
9222 tcg_temp_free_i64(fp0
);
9226 check_cp1_registers(ctx
, fs
| fd
);
9228 TCGv_i64 fp0
= tcg_temp_new_i64();
9230 gen_load_fpr64(ctx
, fp0
, fs
);
9231 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9232 gen_store_fpr64(ctx
, fp0
, fd
);
9233 tcg_temp_free_i64(fp0
);
9237 check_cp1_registers(ctx
, fs
| fd
);
9239 TCGv_i64 fp0
= tcg_temp_new_i64();
9241 gen_load_fpr64(ctx
, fp0
, fs
);
9242 gen_helper_float_abs_d(fp0
, fp0
);
9243 gen_store_fpr64(ctx
, fp0
, fd
);
9244 tcg_temp_free_i64(fp0
);
9248 check_cp1_registers(ctx
, fs
| fd
);
9250 TCGv_i64 fp0
= tcg_temp_new_i64();
9252 gen_load_fpr64(ctx
, fp0
, fs
);
9253 gen_store_fpr64(ctx
, fp0
, fd
);
9254 tcg_temp_free_i64(fp0
);
9258 check_cp1_registers(ctx
, fs
| fd
);
9260 TCGv_i64 fp0
= tcg_temp_new_i64();
9262 gen_load_fpr64(ctx
, fp0
, fs
);
9263 gen_helper_float_chs_d(fp0
, fp0
);
9264 gen_store_fpr64(ctx
, fp0
, fd
);
9265 tcg_temp_free_i64(fp0
);
9269 check_cp1_64bitmode(ctx
);
9271 TCGv_i64 fp0
= tcg_temp_new_i64();
9273 gen_load_fpr64(ctx
, fp0
, fs
);
9274 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9275 gen_store_fpr64(ctx
, fp0
, fd
);
9276 tcg_temp_free_i64(fp0
);
9280 check_cp1_64bitmode(ctx
);
9282 TCGv_i64 fp0
= tcg_temp_new_i64();
9284 gen_load_fpr64(ctx
, fp0
, fs
);
9285 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9286 gen_store_fpr64(ctx
, fp0
, fd
);
9287 tcg_temp_free_i64(fp0
);
9291 check_cp1_64bitmode(ctx
);
9293 TCGv_i64 fp0
= tcg_temp_new_i64();
9295 gen_load_fpr64(ctx
, fp0
, fs
);
9296 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9297 gen_store_fpr64(ctx
, fp0
, fd
);
9298 tcg_temp_free_i64(fp0
);
9302 check_cp1_64bitmode(ctx
);
9304 TCGv_i64 fp0
= tcg_temp_new_i64();
9306 gen_load_fpr64(ctx
, fp0
, fs
);
9307 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9308 gen_store_fpr64(ctx
, fp0
, fd
);
9309 tcg_temp_free_i64(fp0
);
9313 check_cp1_registers(ctx
, fs
);
9315 TCGv_i32 fp32
= tcg_temp_new_i32();
9316 TCGv_i64 fp64
= tcg_temp_new_i64();
9318 gen_load_fpr64(ctx
, fp64
, fs
);
9319 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9320 tcg_temp_free_i64(fp64
);
9321 gen_store_fpr32(ctx
, fp32
, fd
);
9322 tcg_temp_free_i32(fp32
);
9326 check_cp1_registers(ctx
, fs
);
9328 TCGv_i32 fp32
= tcg_temp_new_i32();
9329 TCGv_i64 fp64
= tcg_temp_new_i64();
9331 gen_load_fpr64(ctx
, fp64
, fs
);
9332 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9333 tcg_temp_free_i64(fp64
);
9334 gen_store_fpr32(ctx
, fp32
, fd
);
9335 tcg_temp_free_i32(fp32
);
9339 check_cp1_registers(ctx
, fs
);
9341 TCGv_i32 fp32
= tcg_temp_new_i32();
9342 TCGv_i64 fp64
= tcg_temp_new_i64();
9344 gen_load_fpr64(ctx
, fp64
, fs
);
9345 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9346 tcg_temp_free_i64(fp64
);
9347 gen_store_fpr32(ctx
, fp32
, fd
);
9348 tcg_temp_free_i32(fp32
);
9352 check_cp1_registers(ctx
, fs
);
9354 TCGv_i32 fp32
= tcg_temp_new_i32();
9355 TCGv_i64 fp64
= tcg_temp_new_i64();
9357 gen_load_fpr64(ctx
, fp64
, fs
);
9358 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9359 tcg_temp_free_i64(fp64
);
9360 gen_store_fpr32(ctx
, fp32
, fd
);
9361 tcg_temp_free_i32(fp32
);
9365 check_insn(ctx
, ISA_MIPS32R6
);
9366 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9369 check_insn(ctx
, ISA_MIPS32R6
);
9370 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9373 check_insn(ctx
, ISA_MIPS32R6
);
9374 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9378 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9381 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9383 TCGLabel
*l1
= gen_new_label();
9387 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9389 fp0
= tcg_temp_new_i64();
9390 gen_load_fpr64(ctx
, fp0
, fs
);
9391 gen_store_fpr64(ctx
, fp0
, fd
);
9392 tcg_temp_free_i64(fp0
);
9397 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9399 TCGLabel
*l1
= gen_new_label();
9403 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9404 fp0
= tcg_temp_new_i64();
9405 gen_load_fpr64(ctx
, fp0
, fs
);
9406 gen_store_fpr64(ctx
, fp0
, fd
);
9407 tcg_temp_free_i64(fp0
);
9413 check_cp1_registers(ctx
, fs
| fd
);
9415 TCGv_i64 fp0
= tcg_temp_new_i64();
9417 gen_load_fpr64(ctx
, fp0
, fs
);
9418 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9419 gen_store_fpr64(ctx
, fp0
, fd
);
9420 tcg_temp_free_i64(fp0
);
9424 check_cp1_registers(ctx
, fs
| fd
);
9426 TCGv_i64 fp0
= tcg_temp_new_i64();
9428 gen_load_fpr64(ctx
, fp0
, fs
);
9429 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9430 gen_store_fpr64(ctx
, fp0
, fd
);
9431 tcg_temp_free_i64(fp0
);
9435 check_insn(ctx
, ISA_MIPS32R6
);
9437 TCGv_i64 fp0
= tcg_temp_new_i64();
9438 TCGv_i64 fp1
= tcg_temp_new_i64();
9439 TCGv_i64 fp2
= tcg_temp_new_i64();
9440 gen_load_fpr64(ctx
, fp0
, fs
);
9441 gen_load_fpr64(ctx
, fp1
, ft
);
9442 gen_load_fpr64(ctx
, fp2
, fd
);
9443 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9444 gen_store_fpr64(ctx
, fp2
, fd
);
9445 tcg_temp_free_i64(fp2
);
9446 tcg_temp_free_i64(fp1
);
9447 tcg_temp_free_i64(fp0
);
9451 check_insn(ctx
, ISA_MIPS32R6
);
9453 TCGv_i64 fp0
= tcg_temp_new_i64();
9454 TCGv_i64 fp1
= tcg_temp_new_i64();
9455 TCGv_i64 fp2
= tcg_temp_new_i64();
9456 gen_load_fpr64(ctx
, fp0
, fs
);
9457 gen_load_fpr64(ctx
, fp1
, ft
);
9458 gen_load_fpr64(ctx
, fp2
, fd
);
9459 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9460 gen_store_fpr64(ctx
, fp2
, fd
);
9461 tcg_temp_free_i64(fp2
);
9462 tcg_temp_free_i64(fp1
);
9463 tcg_temp_free_i64(fp0
);
9467 check_insn(ctx
, ISA_MIPS32R6
);
9469 TCGv_i64 fp0
= tcg_temp_new_i64();
9470 gen_load_fpr64(ctx
, fp0
, fs
);
9471 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9472 gen_store_fpr64(ctx
, fp0
, fd
);
9473 tcg_temp_free_i64(fp0
);
9477 check_insn(ctx
, ISA_MIPS32R6
);
9479 TCGv_i64 fp0
= tcg_temp_new_i64();
9480 gen_load_fpr64(ctx
, fp0
, fs
);
9481 gen_helper_float_class_d(fp0
, fp0
);
9482 gen_store_fpr64(ctx
, fp0
, fd
);
9483 tcg_temp_free_i64(fp0
);
9486 case OPC_MIN_D
: /* OPC_RECIP2_D */
9487 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9489 TCGv_i64 fp0
= tcg_temp_new_i64();
9490 TCGv_i64 fp1
= tcg_temp_new_i64();
9491 gen_load_fpr64(ctx
, fp0
, fs
);
9492 gen_load_fpr64(ctx
, fp1
, ft
);
9493 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9494 gen_store_fpr64(ctx
, fp1
, fd
);
9495 tcg_temp_free_i64(fp1
);
9496 tcg_temp_free_i64(fp0
);
9499 check_cp1_64bitmode(ctx
);
9501 TCGv_i64 fp0
= tcg_temp_new_i64();
9502 TCGv_i64 fp1
= tcg_temp_new_i64();
9504 gen_load_fpr64(ctx
, fp0
, fs
);
9505 gen_load_fpr64(ctx
, fp1
, ft
);
9506 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9507 tcg_temp_free_i64(fp1
);
9508 gen_store_fpr64(ctx
, fp0
, fd
);
9509 tcg_temp_free_i64(fp0
);
9513 case OPC_MINA_D
: /* OPC_RECIP1_D */
9514 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9516 TCGv_i64 fp0
= tcg_temp_new_i64();
9517 TCGv_i64 fp1
= tcg_temp_new_i64();
9518 gen_load_fpr64(ctx
, fp0
, fs
);
9519 gen_load_fpr64(ctx
, fp1
, ft
);
9520 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9521 gen_store_fpr64(ctx
, fp1
, fd
);
9522 tcg_temp_free_i64(fp1
);
9523 tcg_temp_free_i64(fp0
);
9526 check_cp1_64bitmode(ctx
);
9528 TCGv_i64 fp0
= tcg_temp_new_i64();
9530 gen_load_fpr64(ctx
, fp0
, fs
);
9531 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9532 gen_store_fpr64(ctx
, fp0
, fd
);
9533 tcg_temp_free_i64(fp0
);
9537 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9538 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9540 TCGv_i64 fp0
= tcg_temp_new_i64();
9541 TCGv_i64 fp1
= tcg_temp_new_i64();
9542 gen_load_fpr64(ctx
, fp0
, fs
);
9543 gen_load_fpr64(ctx
, fp1
, ft
);
9544 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9545 gen_store_fpr64(ctx
, fp1
, fd
);
9546 tcg_temp_free_i64(fp1
);
9547 tcg_temp_free_i64(fp0
);
9550 check_cp1_64bitmode(ctx
);
9552 TCGv_i64 fp0
= tcg_temp_new_i64();
9554 gen_load_fpr64(ctx
, fp0
, fs
);
9555 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9556 gen_store_fpr64(ctx
, fp0
, fd
);
9557 tcg_temp_free_i64(fp0
);
9561 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9562 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9564 TCGv_i64 fp0
= tcg_temp_new_i64();
9565 TCGv_i64 fp1
= tcg_temp_new_i64();
9566 gen_load_fpr64(ctx
, fp0
, fs
);
9567 gen_load_fpr64(ctx
, fp1
, ft
);
9568 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9569 gen_store_fpr64(ctx
, fp1
, fd
);
9570 tcg_temp_free_i64(fp1
);
9571 tcg_temp_free_i64(fp0
);
9574 check_cp1_64bitmode(ctx
);
9576 TCGv_i64 fp0
= tcg_temp_new_i64();
9577 TCGv_i64 fp1
= tcg_temp_new_i64();
9579 gen_load_fpr64(ctx
, fp0
, fs
);
9580 gen_load_fpr64(ctx
, fp1
, ft
);
9581 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9582 tcg_temp_free_i64(fp1
);
9583 gen_store_fpr64(ctx
, fp0
, fd
);
9584 tcg_temp_free_i64(fp0
);
9597 case OPC_CMP_NGLE_D
:
9604 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9605 if (ctx
->opcode
& (1 << 6)) {
9606 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9608 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9612 check_cp1_registers(ctx
, fs
);
9614 TCGv_i32 fp32
= tcg_temp_new_i32();
9615 TCGv_i64 fp64
= tcg_temp_new_i64();
9617 gen_load_fpr64(ctx
, fp64
, fs
);
9618 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9619 tcg_temp_free_i64(fp64
);
9620 gen_store_fpr32(ctx
, fp32
, fd
);
9621 tcg_temp_free_i32(fp32
);
9625 check_cp1_registers(ctx
, fs
);
9627 TCGv_i32 fp32
= tcg_temp_new_i32();
9628 TCGv_i64 fp64
= tcg_temp_new_i64();
9630 gen_load_fpr64(ctx
, fp64
, fs
);
9631 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9632 tcg_temp_free_i64(fp64
);
9633 gen_store_fpr32(ctx
, fp32
, fd
);
9634 tcg_temp_free_i32(fp32
);
9638 check_cp1_64bitmode(ctx
);
9640 TCGv_i64 fp0
= tcg_temp_new_i64();
9642 gen_load_fpr64(ctx
, fp0
, fs
);
9643 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9644 gen_store_fpr64(ctx
, fp0
, fd
);
9645 tcg_temp_free_i64(fp0
);
9650 TCGv_i32 fp0
= tcg_temp_new_i32();
9652 gen_load_fpr32(ctx
, fp0
, fs
);
9653 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9654 gen_store_fpr32(ctx
, fp0
, fd
);
9655 tcg_temp_free_i32(fp0
);
9659 check_cp1_registers(ctx
, fd
);
9661 TCGv_i32 fp32
= tcg_temp_new_i32();
9662 TCGv_i64 fp64
= tcg_temp_new_i64();
9664 gen_load_fpr32(ctx
, fp32
, fs
);
9665 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9666 tcg_temp_free_i32(fp32
);
9667 gen_store_fpr64(ctx
, fp64
, fd
);
9668 tcg_temp_free_i64(fp64
);
9672 check_cp1_64bitmode(ctx
);
9674 TCGv_i32 fp32
= tcg_temp_new_i32();
9675 TCGv_i64 fp64
= tcg_temp_new_i64();
9677 gen_load_fpr64(ctx
, fp64
, fs
);
9678 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9679 tcg_temp_free_i64(fp64
);
9680 gen_store_fpr32(ctx
, fp32
, fd
);
9681 tcg_temp_free_i32(fp32
);
9685 check_cp1_64bitmode(ctx
);
9687 TCGv_i64 fp0
= tcg_temp_new_i64();
9689 gen_load_fpr64(ctx
, fp0
, fs
);
9690 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9691 gen_store_fpr64(ctx
, fp0
, fd
);
9692 tcg_temp_free_i64(fp0
);
9698 TCGv_i64 fp0
= tcg_temp_new_i64();
9700 gen_load_fpr64(ctx
, fp0
, fs
);
9701 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9702 gen_store_fpr64(ctx
, fp0
, fd
);
9703 tcg_temp_free_i64(fp0
);
9709 TCGv_i64 fp0
= tcg_temp_new_i64();
9710 TCGv_i64 fp1
= tcg_temp_new_i64();
9712 gen_load_fpr64(ctx
, fp0
, fs
);
9713 gen_load_fpr64(ctx
, fp1
, ft
);
9714 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9715 tcg_temp_free_i64(fp1
);
9716 gen_store_fpr64(ctx
, fp0
, fd
);
9717 tcg_temp_free_i64(fp0
);
9723 TCGv_i64 fp0
= tcg_temp_new_i64();
9724 TCGv_i64 fp1
= tcg_temp_new_i64();
9726 gen_load_fpr64(ctx
, fp0
, fs
);
9727 gen_load_fpr64(ctx
, fp1
, ft
);
9728 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9729 tcg_temp_free_i64(fp1
);
9730 gen_store_fpr64(ctx
, fp0
, fd
);
9731 tcg_temp_free_i64(fp0
);
9737 TCGv_i64 fp0
= tcg_temp_new_i64();
9738 TCGv_i64 fp1
= tcg_temp_new_i64();
9740 gen_load_fpr64(ctx
, fp0
, fs
);
9741 gen_load_fpr64(ctx
, fp1
, ft
);
9742 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9743 tcg_temp_free_i64(fp1
);
9744 gen_store_fpr64(ctx
, fp0
, fd
);
9745 tcg_temp_free_i64(fp0
);
9751 TCGv_i64 fp0
= tcg_temp_new_i64();
9753 gen_load_fpr64(ctx
, fp0
, fs
);
9754 gen_helper_float_abs_ps(fp0
, fp0
);
9755 gen_store_fpr64(ctx
, fp0
, fd
);
9756 tcg_temp_free_i64(fp0
);
9762 TCGv_i64 fp0
= tcg_temp_new_i64();
9764 gen_load_fpr64(ctx
, fp0
, fs
);
9765 gen_store_fpr64(ctx
, fp0
, fd
);
9766 tcg_temp_free_i64(fp0
);
9772 TCGv_i64 fp0
= tcg_temp_new_i64();
9774 gen_load_fpr64(ctx
, fp0
, fs
);
9775 gen_helper_float_chs_ps(fp0
, fp0
);
9776 gen_store_fpr64(ctx
, fp0
, fd
);
9777 tcg_temp_free_i64(fp0
);
9782 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9787 TCGLabel
*l1
= gen_new_label();
9791 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9792 fp0
= tcg_temp_new_i64();
9793 gen_load_fpr64(ctx
, fp0
, fs
);
9794 gen_store_fpr64(ctx
, fp0
, fd
);
9795 tcg_temp_free_i64(fp0
);
9802 TCGLabel
*l1
= gen_new_label();
9806 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9807 fp0
= tcg_temp_new_i64();
9808 gen_load_fpr64(ctx
, fp0
, fs
);
9809 gen_store_fpr64(ctx
, fp0
, fd
);
9810 tcg_temp_free_i64(fp0
);
9818 TCGv_i64 fp0
= tcg_temp_new_i64();
9819 TCGv_i64 fp1
= tcg_temp_new_i64();
9821 gen_load_fpr64(ctx
, fp0
, ft
);
9822 gen_load_fpr64(ctx
, fp1
, fs
);
9823 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9824 tcg_temp_free_i64(fp1
);
9825 gen_store_fpr64(ctx
, fp0
, fd
);
9826 tcg_temp_free_i64(fp0
);
9832 TCGv_i64 fp0
= tcg_temp_new_i64();
9833 TCGv_i64 fp1
= tcg_temp_new_i64();
9835 gen_load_fpr64(ctx
, fp0
, ft
);
9836 gen_load_fpr64(ctx
, fp1
, fs
);
9837 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9838 tcg_temp_free_i64(fp1
);
9839 gen_store_fpr64(ctx
, fp0
, fd
);
9840 tcg_temp_free_i64(fp0
);
9846 TCGv_i64 fp0
= tcg_temp_new_i64();
9847 TCGv_i64 fp1
= tcg_temp_new_i64();
9849 gen_load_fpr64(ctx
, fp0
, fs
);
9850 gen_load_fpr64(ctx
, fp1
, ft
);
9851 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9852 tcg_temp_free_i64(fp1
);
9853 gen_store_fpr64(ctx
, fp0
, fd
);
9854 tcg_temp_free_i64(fp0
);
9860 TCGv_i64 fp0
= tcg_temp_new_i64();
9862 gen_load_fpr64(ctx
, fp0
, fs
);
9863 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9864 gen_store_fpr64(ctx
, fp0
, fd
);
9865 tcg_temp_free_i64(fp0
);
9871 TCGv_i64 fp0
= tcg_temp_new_i64();
9873 gen_load_fpr64(ctx
, fp0
, fs
);
9874 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9875 gen_store_fpr64(ctx
, fp0
, fd
);
9876 tcg_temp_free_i64(fp0
);
9882 TCGv_i64 fp0
= tcg_temp_new_i64();
9883 TCGv_i64 fp1
= tcg_temp_new_i64();
9885 gen_load_fpr64(ctx
, fp0
, fs
);
9886 gen_load_fpr64(ctx
, fp1
, ft
);
9887 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9888 tcg_temp_free_i64(fp1
);
9889 gen_store_fpr64(ctx
, fp0
, fd
);
9890 tcg_temp_free_i64(fp0
);
9894 check_cp1_64bitmode(ctx
);
9896 TCGv_i32 fp0
= tcg_temp_new_i32();
9898 gen_load_fpr32h(ctx
, fp0
, fs
);
9899 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9900 gen_store_fpr32(ctx
, fp0
, fd
);
9901 tcg_temp_free_i32(fp0
);
9907 TCGv_i64 fp0
= tcg_temp_new_i64();
9909 gen_load_fpr64(ctx
, fp0
, fs
);
9910 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9911 gen_store_fpr64(ctx
, fp0
, fd
);
9912 tcg_temp_free_i64(fp0
);
9916 check_cp1_64bitmode(ctx
);
9918 TCGv_i32 fp0
= tcg_temp_new_i32();
9920 gen_load_fpr32(ctx
, fp0
, fs
);
9921 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9922 gen_store_fpr32(ctx
, fp0
, fd
);
9923 tcg_temp_free_i32(fp0
);
9929 TCGv_i32 fp0
= tcg_temp_new_i32();
9930 TCGv_i32 fp1
= tcg_temp_new_i32();
9932 gen_load_fpr32(ctx
, fp0
, fs
);
9933 gen_load_fpr32(ctx
, fp1
, ft
);
9934 gen_store_fpr32h(ctx
, fp0
, fd
);
9935 gen_store_fpr32(ctx
, fp1
, fd
);
9936 tcg_temp_free_i32(fp0
);
9937 tcg_temp_free_i32(fp1
);
9943 TCGv_i32 fp0
= tcg_temp_new_i32();
9944 TCGv_i32 fp1
= tcg_temp_new_i32();
9946 gen_load_fpr32(ctx
, fp0
, fs
);
9947 gen_load_fpr32h(ctx
, fp1
, ft
);
9948 gen_store_fpr32(ctx
, fp1
, fd
);
9949 gen_store_fpr32h(ctx
, fp0
, fd
);
9950 tcg_temp_free_i32(fp0
);
9951 tcg_temp_free_i32(fp1
);
9957 TCGv_i32 fp0
= tcg_temp_new_i32();
9958 TCGv_i32 fp1
= tcg_temp_new_i32();
9960 gen_load_fpr32h(ctx
, fp0
, fs
);
9961 gen_load_fpr32(ctx
, fp1
, ft
);
9962 gen_store_fpr32(ctx
, fp1
, fd
);
9963 gen_store_fpr32h(ctx
, fp0
, fd
);
9964 tcg_temp_free_i32(fp0
);
9965 tcg_temp_free_i32(fp1
);
9971 TCGv_i32 fp0
= tcg_temp_new_i32();
9972 TCGv_i32 fp1
= tcg_temp_new_i32();
9974 gen_load_fpr32h(ctx
, fp0
, fs
);
9975 gen_load_fpr32h(ctx
, fp1
, ft
);
9976 gen_store_fpr32(ctx
, fp1
, fd
);
9977 gen_store_fpr32h(ctx
, fp0
, fd
);
9978 tcg_temp_free_i32(fp0
);
9979 tcg_temp_free_i32(fp1
);
9985 case OPC_CMP_UEQ_PS
:
9986 case OPC_CMP_OLT_PS
:
9987 case OPC_CMP_ULT_PS
:
9988 case OPC_CMP_OLE_PS
:
9989 case OPC_CMP_ULE_PS
:
9991 case OPC_CMP_NGLE_PS
:
9992 case OPC_CMP_SEQ_PS
:
9993 case OPC_CMP_NGL_PS
:
9995 case OPC_CMP_NGE_PS
:
9997 case OPC_CMP_NGT_PS
:
9998 if (ctx
->opcode
& (1 << 6)) {
9999 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10001 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10005 MIPS_INVAL("farith");
10006 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(ctx, fp0, fd);
        tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, fs);
        tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
        tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
        tcg_temp_free_i64(fp0);
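/*
 * gen_flt3_ldst handles the COP1X indexed FP loads/stores (LWXC1, LDXC1,
 * LUXC1 and the corresponding stores): the effective address is base + index,
 * and the LUXC1/SUXC1 forms clear the low three address bits so the access is
 * always 8-byte aligned.
 */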
10092 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10093 int fd
, int fr
, int fs
, int ft
)
10099 TCGv t0
= tcg_temp_local_new();
10100 TCGv_i32 fp
= tcg_temp_new_i32();
10101 TCGv_i32 fph
= tcg_temp_new_i32();
10102 TCGLabel
*l1
= gen_new_label();
10103 TCGLabel
*l2
= gen_new_label();
10105 gen_load_gpr(t0
, fr
);
10106 tcg_gen_andi_tl(t0
, t0
, 0x7);
10108 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10109 gen_load_fpr32(ctx
, fp
, fs
);
10110 gen_load_fpr32h(ctx
, fph
, fs
);
10111 gen_store_fpr32(ctx
, fp
, fd
);
10112 gen_store_fpr32h(ctx
, fph
, fd
);
10115 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10117 #ifdef TARGET_WORDS_BIGENDIAN
10118 gen_load_fpr32(ctx
, fp
, fs
);
10119 gen_load_fpr32h(ctx
, fph
, ft
);
10120 gen_store_fpr32h(ctx
, fp
, fd
);
10121 gen_store_fpr32(ctx
, fph
, fd
);
10123 gen_load_fpr32h(ctx
, fph
, fs
);
10124 gen_load_fpr32(ctx
, fp
, ft
);
10125 gen_store_fpr32(ctx
, fph
, fd
);
10126 gen_store_fpr32h(ctx
, fp
, fd
);
10129 tcg_temp_free_i32(fp
);
10130 tcg_temp_free_i32(fph
);
10136 TCGv_i32 fp0
= tcg_temp_new_i32();
10137 TCGv_i32 fp1
= tcg_temp_new_i32();
10138 TCGv_i32 fp2
= tcg_temp_new_i32();
10140 gen_load_fpr32(ctx
, fp0
, fs
);
10141 gen_load_fpr32(ctx
, fp1
, ft
);
10142 gen_load_fpr32(ctx
, fp2
, fr
);
10143 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10144 tcg_temp_free_i32(fp0
);
10145 tcg_temp_free_i32(fp1
);
10146 gen_store_fpr32(ctx
, fp2
, fd
);
10147 tcg_temp_free_i32(fp2
);
10152 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10154 TCGv_i64 fp0
= tcg_temp_new_i64();
10155 TCGv_i64 fp1
= tcg_temp_new_i64();
10156 TCGv_i64 fp2
= tcg_temp_new_i64();
10158 gen_load_fpr64(ctx
, fp0
, fs
);
10159 gen_load_fpr64(ctx
, fp1
, ft
);
10160 gen_load_fpr64(ctx
, fp2
, fr
);
10161 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10162 tcg_temp_free_i64(fp0
);
10163 tcg_temp_free_i64(fp1
);
10164 gen_store_fpr64(ctx
, fp2
, fd
);
10165 tcg_temp_free_i64(fp2
);
10171 TCGv_i64 fp0
= tcg_temp_new_i64();
10172 TCGv_i64 fp1
= tcg_temp_new_i64();
10173 TCGv_i64 fp2
= tcg_temp_new_i64();
10175 gen_load_fpr64(ctx
, fp0
, fs
);
10176 gen_load_fpr64(ctx
, fp1
, ft
);
10177 gen_load_fpr64(ctx
, fp2
, fr
);
10178 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10179 tcg_temp_free_i64(fp0
);
10180 tcg_temp_free_i64(fp1
);
10181 gen_store_fpr64(ctx
, fp2
, fd
);
10182 tcg_temp_free_i64(fp2
);
10188 TCGv_i32 fp0
= tcg_temp_new_i32();
10189 TCGv_i32 fp1
= tcg_temp_new_i32();
10190 TCGv_i32 fp2
= tcg_temp_new_i32();
10192 gen_load_fpr32(ctx
, fp0
, fs
);
10193 gen_load_fpr32(ctx
, fp1
, ft
);
10194 gen_load_fpr32(ctx
, fp2
, fr
);
10195 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10196 tcg_temp_free_i32(fp0
);
10197 tcg_temp_free_i32(fp1
);
10198 gen_store_fpr32(ctx
, fp2
, fd
);
10199 tcg_temp_free_i32(fp2
);
10204 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10206 TCGv_i64 fp0
= tcg_temp_new_i64();
10207 TCGv_i64 fp1
= tcg_temp_new_i64();
10208 TCGv_i64 fp2
= tcg_temp_new_i64();
10210 gen_load_fpr64(ctx
, fp0
, fs
);
10211 gen_load_fpr64(ctx
, fp1
, ft
);
10212 gen_load_fpr64(ctx
, fp2
, fr
);
10213 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10214 tcg_temp_free_i64(fp0
);
10215 tcg_temp_free_i64(fp1
);
10216 gen_store_fpr64(ctx
, fp2
, fd
);
10217 tcg_temp_free_i64(fp2
);
10223 TCGv_i64 fp0
= tcg_temp_new_i64();
10224 TCGv_i64 fp1
= tcg_temp_new_i64();
10225 TCGv_i64 fp2
= tcg_temp_new_i64();
10227 gen_load_fpr64(ctx
, fp0
, fs
);
10228 gen_load_fpr64(ctx
, fp1
, ft
);
10229 gen_load_fpr64(ctx
, fp2
, fr
);
10230 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10231 tcg_temp_free_i64(fp0
);
10232 tcg_temp_free_i64(fp1
);
10233 gen_store_fpr64(ctx
, fp2
, fd
);
10234 tcg_temp_free_i64(fp2
);
    case OPC_NMADD_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMADD_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMADD_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMSUB_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    default:
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
    }
}

static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}

static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags as hflags may be changed
           at execution time by the instruction in the delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branch completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch-likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}

/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands and calculate btarget */
    switch (opc) {
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BC:
    case OPC_BALC:
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BEQZC:
    case OPC_BNEZC:
        if (rs != 0) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
            ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        } else {
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        }
        break;
    default:
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            }
            break;
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            }
            break;
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            }
            break;
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            }
            break;
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            if (rs >= rt) {
                /* OPC_BOVC, OPC_BNVC */
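                /*
                 * 32-bit signed-overflow test for BOVC/BNVC: the operands
                 * are first sign-extended from 32 bits, and input_overflow
                 * records whether either source was not already a valid
                 * sign-extended 32-bit value (possible on MIPS64).  Overflow
                 * of the 32-bit addition itself is then detected with the
                 * usual two's-complement rule: the operands have the same
                 * sign but the sum's sign differs, i.e.
                 * (sum ^ rt) & ~(rs ^ rt) has its sign bit set.
                 */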
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                } else {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                }
                tcg_temp_free(input_overflow);
                tcg_temp_free(t4);
                tcg_temp_free(t3);
                tcg_temp_free(t2);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                } else {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                }
            } else {
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                } else {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                }
            }
            break;
        case OPC_BEQZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            break;
        case OPC_BNEZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            break;
        default:
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        gen_set_label(fs);

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
    }

out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
enum {
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,
};

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
enum {
    I64_DADDIUPC = 0x6,
};

/* RR ry field for CNVT */
enum {
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
};
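
/*
 * MIPS16 3-bit register fields do not name $0..$7 directly; they encode
 * the eight most frequently used GPRs.  xlat() below maps field values
 * 0..7 to the architectural register numbers $16, $17, $2..$7.
 */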
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
10851 static void gen_mips16_save (DisasContext
*ctx
,
10852 int xsregs
, int aregs
,
10853 int do_ra
, int do_s0
, int do_s1
,
10856 TCGv t0
= tcg_temp_new();
10857 TCGv t1
= tcg_temp_new();
10858 TCGv t2
= tcg_temp_new();
10888 generate_exception_end(ctx
, EXCP_RI
);
10894 gen_base_offset_addr(ctx
, t0
, 29, 12);
10895 gen_load_gpr(t1
, 7);
10896 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10899 gen_base_offset_addr(ctx
, t0
, 29, 8);
10900 gen_load_gpr(t1
, 6);
10901 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10904 gen_base_offset_addr(ctx
, t0
, 29, 4);
10905 gen_load_gpr(t1
, 5);
10906 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10909 gen_base_offset_addr(ctx
, t0
, 29, 0);
10910 gen_load_gpr(t1
, 4);
10911 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10914 gen_load_gpr(t0
, 29);
10916 #define DECR_AND_STORE(reg) do { \
10917 tcg_gen_movi_tl(t2, -4); \
10918 gen_op_addr_add(ctx, t0, t0, t2); \
10919 gen_load_gpr(t1, reg); \
10920 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10924 DECR_AND_STORE(31);
10929 DECR_AND_STORE(30);
10932 DECR_AND_STORE(23);
10935 DECR_AND_STORE(22);
10938 DECR_AND_STORE(21);
10941 DECR_AND_STORE(20);
10944 DECR_AND_STORE(19);
10947 DECR_AND_STORE(18);
10951 DECR_AND_STORE(17);
10954 DECR_AND_STORE(16);
10984 generate_exception_end(ctx
, EXCP_RI
);
11000 #undef DECR_AND_STORE
11002 tcg_gen_movi_tl(t2
, -framesize
);
11003 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11009 static void gen_mips16_restore (DisasContext
*ctx
,
11010 int xsregs
, int aregs
,
11011 int do_ra
, int do_s0
, int do_s1
,
11015 TCGv t0
= tcg_temp_new();
11016 TCGv t1
= tcg_temp_new();
11017 TCGv t2
= tcg_temp_new();
11019 tcg_gen_movi_tl(t2
, framesize
);
11020 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11022 #define DECR_AND_LOAD(reg) do { \
11023 tcg_gen_movi_tl(t2, -4); \
11024 gen_op_addr_add(ctx, t0, t0, t2); \
11025 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11026 gen_store_gpr(t1, reg); \
11090 generate_exception_end(ctx
, EXCP_RI
);
11106 #undef DECR_AND_LOAD
11108 tcg_gen_movi_tl(t2
, framesize
);
11109 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);

static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}
11136 #if defined(TARGET_MIPS64)
11137 static void decode_i64_mips16 (DisasContext
*ctx
,
11138 int ry
, int funct
, int16_t offset
,
11143 check_insn(ctx
, ISA_MIPS3
);
11144 check_mips_64(ctx
);
11145 offset
= extended
? offset
: offset
<< 3;
11146 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11149 check_insn(ctx
, ISA_MIPS3
);
11150 check_mips_64(ctx
);
11151 offset
= extended
? offset
: offset
<< 3;
11152 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11155 check_insn(ctx
, ISA_MIPS3
);
11156 check_mips_64(ctx
);
11157 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11158 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11161 check_insn(ctx
, ISA_MIPS3
);
11162 check_mips_64(ctx
);
11163 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11164 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11167 check_insn(ctx
, ISA_MIPS3
);
11168 check_mips_64(ctx
);
11169 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11170 generate_exception_end(ctx
, EXCP_RI
);
11172 offset
= extended
? offset
: offset
<< 3;
11173 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11177 check_insn(ctx
, ISA_MIPS3
);
11178 check_mips_64(ctx
);
11179 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11180 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11183 check_insn(ctx
, ISA_MIPS3
);
11184 check_mips_64(ctx
);
11185 offset
= extended
? offset
: offset
<< 2;
11186 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11189 check_insn(ctx
, ISA_MIPS3
);
11190 check_mips_64(ctx
);
11191 offset
= extended
? offset
: offset
<< 2;
11192 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11198 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11200 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11201 int op
, rx
, ry
, funct
, sa
;
11202 int16_t imm
, offset
;
11204 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11205 op
= (ctx
->opcode
>> 11) & 0x1f;
11206 sa
= (ctx
->opcode
>> 22) & 0x1f;
11207 funct
= (ctx
->opcode
>> 8) & 0x7;
11208 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11209 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11210 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11211 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11212 | (ctx
->opcode
& 0x1f));
11214 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11217 case M16_OPC_ADDIUSP
:
11218 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11220 case M16_OPC_ADDIUPC
:
11221 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11224 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11225 /* No delay slot, so just process as a normal instruction */
11228 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11229 /* No delay slot, so just process as a normal instruction */
11231 case M16_OPC_BNEQZ
:
11232 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11233 /* No delay slot, so just process as a normal instruction */
11235 case M16_OPC_SHIFT
:
11236 switch (ctx
->opcode
& 0x3) {
11238 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11241 #if defined(TARGET_MIPS64)
11242 check_mips_64(ctx
);
11243 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11245 generate_exception_end(ctx
, EXCP_RI
);
11249 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11252 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11256 #if defined(TARGET_MIPS64)
11258 check_insn(ctx
, ISA_MIPS3
);
11259 check_mips_64(ctx
);
11260 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11264 imm
= ctx
->opcode
& 0xf;
11265 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11266 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11267 imm
= (int16_t) (imm
<< 1) >> 1;
11268 if ((ctx
->opcode
>> 4) & 0x1) {
11269 #if defined(TARGET_MIPS64)
11270 check_mips_64(ctx
);
11271 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11273 generate_exception_end(ctx
, EXCP_RI
);
11276 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11279 case M16_OPC_ADDIU8
:
11280 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11283 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11285 case M16_OPC_SLTIU
:
11286 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11291 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11294 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11297 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11300 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11303 check_insn(ctx
, ISA_MIPS32
);
11305 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11306 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11307 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11308 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11309 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11310 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11311 | (ctx
->opcode
& 0xf)) << 3;
11313 if (ctx
->opcode
& (1 << 7)) {
11314 gen_mips16_save(ctx
, xsregs
, aregs
,
11315 do_ra
, do_s0
, do_s1
,
11318 gen_mips16_restore(ctx
, xsregs
, aregs
,
11319 do_ra
, do_s0
, do_s1
,
11325 generate_exception_end(ctx
, EXCP_RI
);
11330 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11333 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11335 #if defined(TARGET_MIPS64)
11337 check_insn(ctx
, ISA_MIPS3
);
11338 check_mips_64(ctx
);
11339 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11343 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11346 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11349 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11352 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11355 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11358 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11361 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11363 #if defined(TARGET_MIPS64)
11365 check_insn(ctx
, ISA_MIPS3
);
11366 check_mips_64(ctx
);
11367 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11371 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11374 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11377 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11380 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11382 #if defined(TARGET_MIPS64)
11384 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11388 generate_exception_end(ctx
, EXCP_RI
);

static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
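
/*
 * is_uhi() above tells the SDBBP decoders whether the instruction should be
 * treated as a UHI semihosting call: when semihosting is enabled and the
 * SDBBP code field is 1, callers invoke gen_helper_do_semihosting() instead
 * of raising the usual debug breakpoint exception.
 */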
11404 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11408 int op
, cnvt_op
, op1
, offset
;
11412 op
= (ctx
->opcode
>> 11) & 0x1f;
11413 sa
= (ctx
->opcode
>> 2) & 0x7;
11414 sa
= sa
== 0 ? 8 : sa
;
11415 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11416 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11417 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11418 op1
= offset
= ctx
->opcode
& 0x1f;
11423 case M16_OPC_ADDIUSP
:
11425 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11427 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11430 case M16_OPC_ADDIUPC
:
11431 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11434 offset
= (ctx
->opcode
& 0x7ff) << 1;
11435 offset
= (int16_t)(offset
<< 4) >> 4;
11436 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11437 /* No delay slot, so just process as a normal instruction */
11440 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11441 offset
= (((ctx
->opcode
& 0x1f) << 21)
11442 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11444 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11445 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11449 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11450 ((int8_t)ctx
->opcode
) << 1, 0);
11451 /* No delay slot, so just process as a normal instruction */
11453 case M16_OPC_BNEQZ
:
11454 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11455 ((int8_t)ctx
->opcode
) << 1, 0);
11456 /* No delay slot, so just process as a normal instruction */
11458 case M16_OPC_SHIFT
:
11459 switch (ctx
->opcode
& 0x3) {
11461 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11464 #if defined(TARGET_MIPS64)
11465 check_insn(ctx
, ISA_MIPS3
);
11466 check_mips_64(ctx
);
11467 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11469 generate_exception_end(ctx
, EXCP_RI
);
11473 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11476 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11480 #if defined(TARGET_MIPS64)
11482 check_insn(ctx
, ISA_MIPS3
);
11483 check_mips_64(ctx
);
11484 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11489 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11491 if ((ctx
->opcode
>> 4) & 1) {
11492 #if defined(TARGET_MIPS64)
11493 check_insn(ctx
, ISA_MIPS3
);
11494 check_mips_64(ctx
);
11495 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11497 generate_exception_end(ctx
, EXCP_RI
);
11500 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11504 case M16_OPC_ADDIU8
:
11506 int16_t imm
= (int8_t) ctx
->opcode
;
11508 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11513 int16_t imm
= (uint8_t) ctx
->opcode
;
11514 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11517 case M16_OPC_SLTIU
:
11519 int16_t imm
= (uint8_t) ctx
->opcode
;
11520 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11527 funct
= (ctx
->opcode
>> 8) & 0x7;
11530 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11531 ((int8_t)ctx
->opcode
) << 1, 0);
11534 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11535 ((int8_t)ctx
->opcode
) << 1, 0);
11538 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11541 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11542 ((int8_t)ctx
->opcode
) << 3);
11545 check_insn(ctx
, ISA_MIPS32
);
11547 int do_ra
= ctx
->opcode
& (1 << 6);
11548 int do_s0
= ctx
->opcode
& (1 << 5);
11549 int do_s1
= ctx
->opcode
& (1 << 4);
11550 int framesize
= ctx
->opcode
& 0xf;
11552 if (framesize
== 0) {
11555 framesize
= framesize
<< 3;
11558 if (ctx
->opcode
& (1 << 7)) {
11559 gen_mips16_save(ctx
, 0, 0,
11560 do_ra
, do_s0
, do_s1
, framesize
);
11562 gen_mips16_restore(ctx
, 0, 0,
11563 do_ra
, do_s0
, do_s1
, framesize
);
11569 int rz
= xlat(ctx
->opcode
& 0x7);
11571 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11572 ((ctx
->opcode
>> 5) & 0x7);
11573 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11577 reg32
= ctx
->opcode
& 0x1f;
11578 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11581 generate_exception_end(ctx
, EXCP_RI
);
11588 int16_t imm
= (uint8_t) ctx
->opcode
;
11590 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11595 int16_t imm
= (uint8_t) ctx
->opcode
;
11596 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11599 #if defined(TARGET_MIPS64)
11601 check_insn(ctx
, ISA_MIPS3
);
11602 check_mips_64(ctx
);
11603 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11607 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11610 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11613 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11616 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11619 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11622 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11625 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11627 #if defined (TARGET_MIPS64)
11629 check_insn(ctx
, ISA_MIPS3
);
11630 check_mips_64(ctx
);
11631 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11635 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11638 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11641 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11644 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11648 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11651 switch (ctx
->opcode
& 0x3) {
11653 mips32_op
= OPC_ADDU
;
11656 mips32_op
= OPC_SUBU
;
11658 #if defined(TARGET_MIPS64)
11660 mips32_op
= OPC_DADDU
;
11661 check_insn(ctx
, ISA_MIPS3
);
11662 check_mips_64(ctx
);
11665 mips32_op
= OPC_DSUBU
;
11666 check_insn(ctx
, ISA_MIPS3
);
11667 check_mips_64(ctx
);
11671 generate_exception_end(ctx
, EXCP_RI
);
11675 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11684 int nd
= (ctx
->opcode
>> 7) & 0x1;
11685 int link
= (ctx
->opcode
>> 6) & 0x1;
11686 int ra
= (ctx
->opcode
>> 5) & 0x1;
11689 check_insn(ctx
, ISA_MIPS32
);
11698 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11703 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11704 gen_helper_do_semihosting(cpu_env
);
11706 /* XXX: not clear which exception should be raised
11707 * when in debug mode...
11709 check_insn(ctx
, ISA_MIPS32
);
11710 generate_exception_end(ctx
, EXCP_DBp
);
11714 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11717 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11720 generate_exception_end(ctx
, EXCP_BREAK
);
11723 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11726 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11729 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11731 #if defined (TARGET_MIPS64)
11733 check_insn(ctx
, ISA_MIPS3
);
11734 check_mips_64(ctx
);
11735 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11739 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11742 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11745 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11748 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11751 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11754 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11757 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11760 check_insn(ctx
, ISA_MIPS32
);
11762 case RR_RY_CNVT_ZEB
:
11763 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11765 case RR_RY_CNVT_ZEH
:
11766 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11768 case RR_RY_CNVT_SEB
:
11769 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11771 case RR_RY_CNVT_SEH
:
11772 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11774 #if defined (TARGET_MIPS64)
11775 case RR_RY_CNVT_ZEW
:
11776 check_insn(ctx
, ISA_MIPS64
);
11777 check_mips_64(ctx
);
11778 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11780 case RR_RY_CNVT_SEW
:
11781 check_insn(ctx
, ISA_MIPS64
);
11782 check_mips_64(ctx
);
11783 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11787 generate_exception_end(ctx
, EXCP_RI
);
11792 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11794 #if defined (TARGET_MIPS64)
11796 check_insn(ctx
, ISA_MIPS3
);
11797 check_mips_64(ctx
);
11798 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11801 check_insn(ctx
, ISA_MIPS3
);
11802 check_mips_64(ctx
);
11803 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11806 check_insn(ctx
, ISA_MIPS3
);
11807 check_mips_64(ctx
);
11808 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11811 check_insn(ctx
, ISA_MIPS3
);
11812 check_mips_64(ctx
);
11813 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11817 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11820 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11823 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11826 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11828 #if defined (TARGET_MIPS64)
11830 check_insn(ctx
, ISA_MIPS3
);
11831 check_mips_64(ctx
);
11832 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11835 check_insn(ctx
, ISA_MIPS3
);
11836 check_mips_64(ctx
);
11837 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11840 check_insn(ctx
, ISA_MIPS3
);
11841 check_mips_64(ctx
);
11842 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11845 check_insn(ctx
, ISA_MIPS3
);
11846 check_mips_64(ctx
);
11847 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11851 generate_exception_end(ctx
, EXCP_RI
);
11855 case M16_OPC_EXTEND
:
11856 decode_extended_mips16_opc(env
, ctx
);
11859 #if defined(TARGET_MIPS64)
11861 funct
= (ctx
->opcode
>> 8) & 0x7;
11862 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11866 generate_exception_end(ctx
, EXCP_RI
);
11873 /* microMIPS extension to MIPS32/MIPS64 */
11876 * microMIPS32/microMIPS64 major opcodes
11878 * 1. MIPS Architecture for Programmers Volume II-B:
11879 * The microMIPS32 Instruction Set (Revision 3.05)
11881 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11883 * 2. MIPS Architecture For Programmers Volume II-A:
11884 * The MIPS64 Instruction Set (Revision 3.51)
11914 POOL32S
= 0x16, /* MIPS64 */
11915 DADDIU32
= 0x17, /* MIPS64 */
11944 /* 0x29 is reserved */
11957 /* 0x31 is reserved */
11970 SD32
= 0x36, /* MIPS64 */
11971 LD32
= 0x37, /* MIPS64 */
11973 /* 0x39 is reserved */
11989 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
11999 /* POOL32A encoding of minor opcode field */
12002 /* These opcodes are distinguished only by bits 9..6; those bits are
12003 * what are recorded below. */
12040 /* The following can be distinguished by their lower 6 bits. */
12050 /* POOL32AXF encoding of minor opcode field extension */
12053 * 1. MIPS Architecture for Programmers Volume II-B:
12054 * The microMIPS32 Instruction Set (Revision 3.05)
12056 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12058 * 2. MIPS Architecture for Programmers VolumeIV-e:
12059 * The MIPS DSP Application-Specific Extension
12060 * to the microMIPS32 Architecture (Revision 2.34)
12062 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12077 /* begin of microMIPS32 DSP */
12079 /* bits 13..12 for 0x01 */
12085 /* bits 13..12 for 0x2a */
12091 /* bits 13..12 for 0x32 */
12095 /* end of microMIPS32 DSP */
12097 /* bits 15..12 for 0x2c */
12114 /* bits 15..12 for 0x34 */
12122 /* bits 15..12 for 0x3c */
12124 JR
= 0x0, /* alias */
12132 /* bits 15..12 for 0x05 */
12136 /* bits 15..12 for 0x0d */
12148 /* bits 15..12 for 0x15 */
12154 /* bits 15..12 for 0x1d */
12158 /* bits 15..12 for 0x2d */
12163 /* bits 15..12 for 0x35 */
12170 /* POOL32B encoding of minor opcode field (bits 15..12) */
12186 /* POOL32C encoding of minor opcode field (bits 15..12) */
12194 /* 0xa is reserved */
12201 /* 0x6 is reserved */
12207 /* POOL32F encoding of minor opcode field (bits 5..0) */
12210 /* These are the bit 7..6 values */
12219 /* These are the bit 8..6 values */
12244 MOVZ_FMT_05
= 0x05,
12278 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12285 /* POOL32Fxf encoding of minor opcode extension field */
12323 /* POOL32I encoding of minor opcode field (bits 25..21) */
12353 /* These overlap and are distinguished by bit16 of the instruction */
12362 /* POOL16A encoding of minor opcode field */
12369 /* POOL16B encoding of minor opcode field */
12376 /* POOL16C encoding of minor opcode field */
12396 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12416 /* POOL16D encoding of minor opcode field */
12423 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
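
/*
 * Illustration only: with op = 0x0000ffe0, SIMM(op, 5, 6) extracts bits
 * 5..10 (raw value 0x3f) and sign-extends them to -1, while ZIMM(op, 5, 6)
 * returns the raw value 0x3f unchanged.
 */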

static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
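
/*
 * The decode in gen_addiusp() above implements the non-contiguous ADDIUSP
 * immediate encoding: raw values 2..255 are used as-is, 0..1 map to 256..257,
 * 256..509 stand for negative adjustments (encoded - 512), and the remaining
 * values map to encoded - 768.  The result is scaled by 4 before being added
 * to $sp.
 */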

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
12513 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12514 int base
, int16_t offset
)
12519 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12520 generate_exception_end(ctx
, EXCP_RI
);
12524 t0
= tcg_temp_new();
12526 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12528 t1
= tcg_const_tl(reglist
);
12529 t2
= tcg_const_i32(ctx
->mem_idx
);
12531 save_cpu_state(ctx
, 1);
12534 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12537 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12539 #ifdef TARGET_MIPS64
12541 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12544 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12550 tcg_temp_free_i32(t2
);
12554 static void gen_pool16c_insn(DisasContext
*ctx
)
12556 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12557 int rs
= mmreg(ctx
->opcode
& 0x7);
12559 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12564 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12570 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12576 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12582 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12589 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12590 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12592 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12601 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12602 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12604 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12611 int reg
= ctx
->opcode
& 0x1f;
12613 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12619 int reg
= ctx
->opcode
& 0x1f;
12620 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12621 /* Let normal delay slot handling in our caller take us
12622 to the branch target. */
12627 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12628 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12632 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12633 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12637 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12641 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12644 generate_exception_end(ctx
, EXCP_BREAK
);
12647 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12648 gen_helper_do_semihosting(cpu_env
);
12650 /* XXX: not clear which exception should be raised
12651 * when in debug mode...
12653 check_insn(ctx
, ISA_MIPS32
);
12654 generate_exception_end(ctx
, EXCP_DBp
);
12657 case JRADDIUSP
+ 0:
12658 case JRADDIUSP
+ 1:
12660 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12661 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12662 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12663 /* Let normal delay slot handling in our caller take us
12664 to the branch target. */
12668 generate_exception_end(ctx
, EXCP_RI
);

static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
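
/*
 * gen_movep() above copies two GPRs with a single 16-bit MOVEP instruction:
 * enc_dest selects one of eight destination register pairs (rd_enc/re_enc),
 * while the 3-bit source fields index rs_rt_enc, where entry 0 stands for
 * $zero and therefore moves a literal 0 into the corresponding destination.
 */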
12696 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12698 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12699 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12701 switch (ctx
->opcode
& 0xf) {
12703 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12706 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12710 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12711 int offset
= extract32(ctx
->opcode
, 4, 4);
12712 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12715 case R6_JRC16
: /* JRCADDIUSP */
12716 if ((ctx
->opcode
>> 4) & 1) {
12718 int imm
= extract32(ctx
->opcode
, 5, 5);
12719 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12720 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12723 int rs
= extract32(ctx
->opcode
, 5, 5);
12724 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12727 case MOVEP
... MOVEP_07
:
12728 case MOVEP_0C
... MOVEP_0F
:
12730 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12731 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12732 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12733 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12737 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12740 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12744 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12745 int offset
= extract32(ctx
->opcode
, 4, 4);
12746 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12749 case JALRC16
: /* BREAK16, SDBBP16 */
12750 switch (ctx
->opcode
& 0x3f) {
12752 case JALRC16
+ 0x20:
12754 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12759 generate_exception(ctx
, EXCP_BREAK
);
12763 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12764 gen_helper_do_semihosting(cpu_env
);
12766 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12767 generate_exception(ctx
, EXCP_RI
);
12769 generate_exception(ctx
, EXCP_DBp
);
12776 generate_exception(ctx
, EXCP_RI
);

static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
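
/*
 * gen_ldxs() above implements LWXS (load word indexed, scaled): the effective
 * address is base + (index << 2), so the index register is interpreted as a
 * word offset rather than a byte offset.
 */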
12801 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12802 int base
, int16_t offset
)
12806 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12807 generate_exception_end(ctx
, EXCP_RI
);
12811 t0
= tcg_temp_new();
12812 t1
= tcg_temp_new();
12814 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12819 generate_exception_end(ctx
, EXCP_RI
);
12822 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12823 gen_store_gpr(t1
, rd
);
12824 tcg_gen_movi_tl(t1
, 4);
12825 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12826 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12827 gen_store_gpr(t1
, rd
+1);
12830 gen_load_gpr(t1
, rd
);
12831 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12832 tcg_gen_movi_tl(t1
, 4);
12833 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12834 gen_load_gpr(t1
, rd
+1);
12835 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12837 #ifdef TARGET_MIPS64
12840 generate_exception_end(ctx
, EXCP_RI
);
12843 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12844 gen_store_gpr(t1
, rd
);
12845 tcg_gen_movi_tl(t1
, 8);
12846 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12847 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12848 gen_store_gpr(t1
, rd
+1);
12851 gen_load_gpr(t1
, rd
);
12852 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12853 tcg_gen_movi_tl(t1
, 8);
12854 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12855 gen_load_gpr(t1
, rd
+1);
12856 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12864 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12866 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12867 int minor
= (ctx
->opcode
>> 12) & 0xf;
12868 uint32_t mips32_op
;
12870 switch (extension
) {
12872 mips32_op
= OPC_TEQ
;
12875 mips32_op
= OPC_TGE
;
12878 mips32_op
= OPC_TGEU
;
12881 mips32_op
= OPC_TLT
;
12884 mips32_op
= OPC_TLTU
;
12887 mips32_op
= OPC_TNE
;
12889 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12891 #ifndef CONFIG_USER_ONLY
12894 check_cp0_enabled(ctx
);
12896 /* Treat as NOP. */
12899 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12903 check_cp0_enabled(ctx
);
12905 TCGv t0
= tcg_temp_new();
12907 gen_load_gpr(t0
, rt
);
12908 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12914 switch (minor
& 3) {
12916 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12919 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12922 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12925 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12928 goto pool32axf_invalid
;
12932 switch (minor
& 3) {
12934 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12937 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12940 goto pool32axf_invalid
;
12946 check_insn(ctx
, ISA_MIPS32R6
);
12947 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12950 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12953 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12956 mips32_op
= OPC_CLO
;
12959 mips32_op
= OPC_CLZ
;
12961 check_insn(ctx
, ISA_MIPS32
);
12962 gen_cl(ctx
, mips32_op
, rt
, rs
);
12965 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12966 gen_rdhwr(ctx
, rt
, rs
, 0);
12969 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
12972 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12973 mips32_op
= OPC_MULT
;
12976 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12977 mips32_op
= OPC_MULTU
;
12980 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12981 mips32_op
= OPC_DIV
;
12984 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12985 mips32_op
= OPC_DIVU
;
12988 check_insn(ctx
, ISA_MIPS32
);
12989 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12992 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12993 mips32_op
= OPC_MADD
;
12996 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12997 mips32_op
= OPC_MADDU
;
13000 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13001 mips32_op
= OPC_MSUB
;
13004 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13005 mips32_op
= OPC_MSUBU
;
13007 check_insn(ctx
, ISA_MIPS32
);
13008 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13011 goto pool32axf_invalid
;
13022 generate_exception_err(ctx
, EXCP_CpU
, 2);
13025 goto pool32axf_invalid
;
13030 case JALR
: /* JALRC */
13031 case JALR_HB
: /* JALRC_HB */
13032 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13033 /* JALRC, JALRC_HB */
13034 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13036 /* JALR, JALR_HB */
13037 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13038 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13043 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13044 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13045 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13048 goto pool32axf_invalid
;
13054 check_cp0_enabled(ctx
);
13055 check_insn(ctx
, ISA_MIPS32R2
);
13056 gen_load_srsgpr(rs
, rt
);
13059 check_cp0_enabled(ctx
);
13060 check_insn(ctx
, ISA_MIPS32R2
);
13061 gen_store_srsgpr(rs
, rt
);
13064 goto pool32axf_invalid
;
13067 #ifndef CONFIG_USER_ONLY
13071 mips32_op
= OPC_TLBP
;
13074 mips32_op
= OPC_TLBR
;
13077 mips32_op
= OPC_TLBWI
;
13080 mips32_op
= OPC_TLBWR
;
13083 mips32_op
= OPC_TLBINV
;
13086 mips32_op
= OPC_TLBINVF
;
13089 mips32_op
= OPC_WAIT
;
13092 mips32_op
= OPC_DERET
;
13095 mips32_op
= OPC_ERET
;
13097 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13100 goto pool32axf_invalid
;
13106 check_cp0_enabled(ctx
);
13108 TCGv t0
= tcg_temp_new();
13110 save_cpu_state(ctx
, 1);
13111 gen_helper_di(t0
, cpu_env
);
13112 gen_store_gpr(t0
, rs
);
13113 /* Stop translation as we may have switched the execution mode */
13114 ctx
->bstate
= BS_STOP
;
13119 check_cp0_enabled(ctx
);
13121 TCGv t0
= tcg_temp_new();
13123 save_cpu_state(ctx
, 1);
13124 gen_helper_ei(t0
, cpu_env
);
13125 gen_store_gpr(t0
, rs
);
13126 /* Stop translation as we may have switched the execution mode */
13127 ctx
->bstate
= BS_STOP
;
13132 goto pool32axf_invalid
;
13142 generate_exception_end(ctx
, EXCP_SYSCALL
);
13145 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13146 gen_helper_do_semihosting(cpu_env
);
13148 check_insn(ctx
, ISA_MIPS32
);
13149 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13150 generate_exception_end(ctx
, EXCP_RI
);
13152 generate_exception_end(ctx
, EXCP_DBp
);
13157 goto pool32axf_invalid
;
13161 switch (minor
& 3) {
13163 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13166 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13169 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13172 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13175 goto pool32axf_invalid
;
13179 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13182 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13185 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13188 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13191 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13194 goto pool32axf_invalid
;
13199 MIPS_INVAL("pool32axf");
13200 generate_exception_end(ctx
, EXCP_RI
);
13205 /* Values for microMIPS fmt field. Variable-width, depending on which
13206 formats the instruction supports. */
13225 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13227 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13228 uint32_t mips32_op
;
13230 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13231 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13232 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13234 switch (extension
) {
13235 case FLOAT_1BIT_FMT(CFC1
, 0):
13236 mips32_op
= OPC_CFC1
;
13238 case FLOAT_1BIT_FMT(CTC1
, 0):
13239 mips32_op
= OPC_CTC1
;
13241 case FLOAT_1BIT_FMT(MFC1
, 0):
13242 mips32_op
= OPC_MFC1
;
13244 case FLOAT_1BIT_FMT(MTC1
, 0):
13245 mips32_op
= OPC_MTC1
;
13247 case FLOAT_1BIT_FMT(MFHC1
, 0):
13248 mips32_op
= OPC_MFHC1
;
13250 case FLOAT_1BIT_FMT(MTHC1
, 0):
13251 mips32_op
= OPC_MTHC1
;
13253 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13256 /* Reciprocal square root */
13257 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13258 mips32_op
= OPC_RSQRT_S
;
13260 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13261 mips32_op
= OPC_RSQRT_D
;
13265 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13266 mips32_op
= OPC_SQRT_S
;
13268 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13269 mips32_op
= OPC_SQRT_D
;
13273 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13274 mips32_op
= OPC_RECIP_S
;
13276 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13277 mips32_op
= OPC_RECIP_D
;
13281 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13282 mips32_op
= OPC_FLOOR_L_S
;
13284 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13285 mips32_op
= OPC_FLOOR_L_D
;
13287 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13288 mips32_op
= OPC_FLOOR_W_S
;
13290 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13291 mips32_op
= OPC_FLOOR_W_D
;
13295 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13296 mips32_op
= OPC_CEIL_L_S
;
13298 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13299 mips32_op
= OPC_CEIL_L_D
;
13301 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13302 mips32_op
= OPC_CEIL_W_S
;
13304 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13305 mips32_op
= OPC_CEIL_W_D
;
13309 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13310 mips32_op
= OPC_TRUNC_L_S
;
13312 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13313 mips32_op
= OPC_TRUNC_L_D
;
13315 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13316 mips32_op
= OPC_TRUNC_W_S
;
13318 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13319 mips32_op
= OPC_TRUNC_W_D
;
13323 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13324 mips32_op
= OPC_ROUND_L_S
;
13326 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13327 mips32_op
= OPC_ROUND_L_D
;
13329 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13330 mips32_op
= OPC_ROUND_W_S
;
13332 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13333 mips32_op
= OPC_ROUND_W_D
;
13336 /* Integer to floating-point conversion */
13337 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13338 mips32_op
= OPC_CVT_L_S
;
13340 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13341 mips32_op
= OPC_CVT_L_D
;
13343 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13344 mips32_op
= OPC_CVT_W_S
;
13346 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13347 mips32_op
= OPC_CVT_W_D
;
13350 /* Paired-foo conversions */
13351 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13352 mips32_op
= OPC_CVT_S_PL
;
13354 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13355 mips32_op
= OPC_CVT_S_PU
;
13357 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13358 mips32_op
= OPC_CVT_PW_PS
;
13360 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13361 mips32_op
= OPC_CVT_PS_PW
;
13364 /* Floating-point moves */
13365 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13366 mips32_op
= OPC_MOV_S
;
13368 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13369 mips32_op
= OPC_MOV_D
;
13371 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13372 mips32_op
= OPC_MOV_PS
;
13375 /* Absolute value */
13376 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13377 mips32_op
= OPC_ABS_S
;
13379 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13380 mips32_op
= OPC_ABS_D
;
13382 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13383 mips32_op
= OPC_ABS_PS
;
13387 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13388 mips32_op
= OPC_NEG_S
;
13390 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13391 mips32_op
= OPC_NEG_D
;
13393 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13394 mips32_op
= OPC_NEG_PS
;
13397 /* Reciprocal square root step */
13398 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13399 mips32_op
= OPC_RSQRT1_S
;
13401 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13402 mips32_op
= OPC_RSQRT1_D
;
13404 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13405 mips32_op
= OPC_RSQRT1_PS
;
13408 /* Reciprocal step */
13409 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13410 mips32_op
= OPC_RECIP1_S
;
13412 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13413 mips32_op
= OPC_RECIP1_S
;
13415 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13416 mips32_op
= OPC_RECIP1_PS
;
13419 /* Conversions from double */
13420 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13421 mips32_op
= OPC_CVT_D_S
;
13423 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13424 mips32_op
= OPC_CVT_D_W
;
13426 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13427 mips32_op
= OPC_CVT_D_L
;
13430 /* Conversions from single */
13431 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13432 mips32_op
= OPC_CVT_S_D
;
13434 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13435 mips32_op
= OPC_CVT_S_W
;
13437 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13438 mips32_op
= OPC_CVT_S_L
;
13440 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13443 /* Conditional moves on floating-point codes */
13444 case COND_FLOAT_MOV(MOVT
, 0):
13445 case COND_FLOAT_MOV(MOVT
, 1):
13446 case COND_FLOAT_MOV(MOVT
, 2):
13447 case COND_FLOAT_MOV(MOVT
, 3):
13448 case COND_FLOAT_MOV(MOVT
, 4):
13449 case COND_FLOAT_MOV(MOVT
, 5):
13450 case COND_FLOAT_MOV(MOVT
, 6):
13451 case COND_FLOAT_MOV(MOVT
, 7):
13452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13453 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13455 case COND_FLOAT_MOV(MOVF
, 0):
13456 case COND_FLOAT_MOV(MOVF
, 1):
13457 case COND_FLOAT_MOV(MOVF
, 2):
13458 case COND_FLOAT_MOV(MOVF
, 3):
13459 case COND_FLOAT_MOV(MOVF
, 4):
13460 case COND_FLOAT_MOV(MOVF
, 5):
13461 case COND_FLOAT_MOV(MOVF
, 6):
13462 case COND_FLOAT_MOV(MOVF
, 7):
13463 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13464 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13467 MIPS_INVAL("pool32fxf");
13468 generate_exception_end(ctx
, EXCP_RI
);
13473 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13477 int rt
, rs
, rd
, rr
;
13479 uint32_t op
, minor
, mips32_op
;
13480 uint32_t cond
, fmt
, cc
;
13482 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13483 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13485 rt
= (ctx
->opcode
>> 21) & 0x1f;
13486 rs
= (ctx
->opcode
>> 16) & 0x1f;
13487 rd
= (ctx
->opcode
>> 11) & 0x1f;
13488 rr
= (ctx
->opcode
>> 6) & 0x1f;
13489 imm
= (int16_t) ctx
->opcode
;
13491 op
= (ctx
->opcode
>> 26) & 0x3f;
13494 minor
= ctx
->opcode
& 0x3f;
13497 minor
= (ctx
->opcode
>> 6) & 0xf;
13500 mips32_op
= OPC_SLL
;
13503 mips32_op
= OPC_SRA
;
13506 mips32_op
= OPC_SRL
;
13509 mips32_op
= OPC_ROTR
;
13511 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13514 check_insn(ctx
, ISA_MIPS32R6
);
13515 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13518 check_insn(ctx
, ISA_MIPS32R6
);
13519 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13522 check_insn(ctx
, ISA_MIPS32R6
);
13523 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13526 goto pool32a_invalid
;
13530 minor
= (ctx
->opcode
>> 6) & 0xf;
13534 mips32_op
= OPC_ADD
;
13537 mips32_op
= OPC_ADDU
;
13540 mips32_op
= OPC_SUB
;
13543 mips32_op
= OPC_SUBU
;
13546 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13547 mips32_op
= OPC_MUL
;
13549 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13553 mips32_op
= OPC_SLLV
;
13556 mips32_op
= OPC_SRLV
;
13559 mips32_op
= OPC_SRAV
;
13562 mips32_op
= OPC_ROTRV
;
13564 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13566 /* Logical operations */
13568 mips32_op
= OPC_AND
;
13571 mips32_op
= OPC_OR
;
13574 mips32_op
= OPC_NOR
;
13577 mips32_op
= OPC_XOR
;
13579 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13581 /* Set less than */
13583 mips32_op
= OPC_SLT
;
13586 mips32_op
= OPC_SLTU
;
13588 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13591 goto pool32a_invalid
;
13595 minor
= (ctx
->opcode
>> 6) & 0xf;
13597 /* Conditional moves */
13598 case MOVN
: /* MUL */
13599 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13601 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13604 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13607 case MOVZ
: /* MUH */
13608 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13610 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13613 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13617 check_insn(ctx
, ISA_MIPS32R6
);
13618 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13621 check_insn(ctx
, ISA_MIPS32R6
);
13622 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13624 case LWXS
: /* DIV */
13625 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13627 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13630 gen_ldxs(ctx
, rs
, rt
, rd
);
13634 check_insn(ctx
, ISA_MIPS32R6
);
13635 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13638 check_insn(ctx
, ISA_MIPS32R6
);
13639 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13642 check_insn(ctx
, ISA_MIPS32R6
);
13643 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13646 goto pool32a_invalid
;
13650 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13653 check_insn(ctx
, ISA_MIPS32R6
);
13654 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13655 extract32(ctx
->opcode
, 9, 2));
13658 check_insn(ctx
, ISA_MIPS32R6
);
13659 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13660 extract32(ctx
->opcode
, 9, 2));
13663 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13666 gen_pool32axf(env
, ctx
, rt
, rs
);
13669 generate_exception_end(ctx
, EXCP_BREAK
);
13672 check_insn(ctx
, ISA_MIPS32R6
);
13673 generate_exception_end(ctx
, EXCP_RI
);
        default:
        pool32a_invalid:
            MIPS_INVAL("pool32a");
            generate_exception_end(ctx, EXCP_RI);
            break;
13683 minor
= (ctx
->opcode
>> 12) & 0xf;
13686 check_cp0_enabled(ctx
);
13687 /* Treat as no-op. */
13691 /* COP2: Not implemented. */
13692 generate_exception_err(ctx
, EXCP_CpU
, 2);
13694 #ifdef TARGET_MIPS64
13697 check_insn(ctx
, ISA_MIPS3
);
13698 check_mips_64(ctx
);
13703 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13705 #ifdef TARGET_MIPS64
13708 check_insn(ctx
, ISA_MIPS3
);
13709 check_mips_64(ctx
);
13714 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
            MIPS_INVAL("pool32b");
            generate_exception_end(ctx, EXCP_RI);
13723 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13724 minor
= ctx
->opcode
& 0x3f;
13725 check_cp1_enabled(ctx
);
13728 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13729 mips32_op
= OPC_ALNV_PS
;
13732 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13733 mips32_op
= OPC_MADD_S
;
13736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13737 mips32_op
= OPC_MADD_D
;
13740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13741 mips32_op
= OPC_MADD_PS
;
13744 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13745 mips32_op
= OPC_MSUB_S
;
13748 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13749 mips32_op
= OPC_MSUB_D
;
13752 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13753 mips32_op
= OPC_MSUB_PS
;
13756 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13757 mips32_op
= OPC_NMADD_S
;
13760 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13761 mips32_op
= OPC_NMADD_D
;
13764 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13765 mips32_op
= OPC_NMADD_PS
;
13768 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13769 mips32_op
= OPC_NMSUB_S
;
13772 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13773 mips32_op
= OPC_NMSUB_D
;
13776 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13777 mips32_op
= OPC_NMSUB_PS
;
13779 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13781 case CABS_COND_FMT
:
13782 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13783 cond
= (ctx
->opcode
>> 6) & 0xf;
13784 cc
= (ctx
->opcode
>> 13) & 0x7;
13785 fmt
= (ctx
->opcode
>> 10) & 0x3;
13788 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13791 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13794 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13797 goto pool32f_invalid
;
13801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13802 cond
= (ctx
->opcode
>> 6) & 0xf;
13803 cc
= (ctx
->opcode
>> 13) & 0x7;
13804 fmt
= (ctx
->opcode
>> 10) & 0x3;
13807 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13810 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13813 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13816 goto pool32f_invalid
;
13820 check_insn(ctx
, ISA_MIPS32R6
);
13821 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13824 check_insn(ctx
, ISA_MIPS32R6
);
13825 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13828 gen_pool32fxf(ctx
, rt
, rs
);
13832 switch ((ctx
->opcode
>> 6) & 0x7) {
13834 mips32_op
= OPC_PLL_PS
;
13837 mips32_op
= OPC_PLU_PS
;
13840 mips32_op
= OPC_PUL_PS
;
13843 mips32_op
= OPC_PUU_PS
;
13846 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13847 mips32_op
= OPC_CVT_PS_S
;
13849 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13852 goto pool32f_invalid
;
13856 check_insn(ctx
, ISA_MIPS32R6
);
13857 switch ((ctx
->opcode
>> 9) & 0x3) {
13859 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13862 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13865 goto pool32f_invalid
;
13870 switch ((ctx
->opcode
>> 6) & 0x7) {
13872 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13873 mips32_op
= OPC_LWXC1
;
13876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13877 mips32_op
= OPC_SWXC1
;
13880 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13881 mips32_op
= OPC_LDXC1
;
13884 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13885 mips32_op
= OPC_SDXC1
;
13888 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13889 mips32_op
= OPC_LUXC1
;
13892 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13893 mips32_op
= OPC_SUXC1
;
13895 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13898 goto pool32f_invalid
;
13902 check_insn(ctx
, ISA_MIPS32R6
);
13903 switch ((ctx
->opcode
>> 9) & 0x3) {
13905 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13908 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13911 goto pool32f_invalid
;
13916 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13917 fmt
= (ctx
->opcode
>> 9) & 0x3;
13918 switch ((ctx
->opcode
>> 6) & 0x7) {
13922 mips32_op
= OPC_RSQRT2_S
;
13925 mips32_op
= OPC_RSQRT2_D
;
13928 mips32_op
= OPC_RSQRT2_PS
;
13931 goto pool32f_invalid
;
13937 mips32_op
= OPC_RECIP2_S
;
13940 mips32_op
= OPC_RECIP2_D
;
13943 mips32_op
= OPC_RECIP2_PS
;
13946 goto pool32f_invalid
;
13950 mips32_op
= OPC_ADDR_PS
;
13953 mips32_op
= OPC_MULR_PS
;
13955 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13958 goto pool32f_invalid
;
13962 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13963 cc
= (ctx
->opcode
>> 13) & 0x7;
13964 fmt
= (ctx
->opcode
>> 9) & 0x3;
13965 switch ((ctx
->opcode
>> 6) & 0x7) {
13966 case MOVF_FMT
: /* RINT_FMT */
13967 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13971 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13974 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13977 goto pool32f_invalid
;
13983 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13986 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13990 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13993 goto pool32f_invalid
;
13997 case MOVT_FMT
: /* CLASS_FMT */
13998 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14002 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14005 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14008 goto pool32f_invalid
;
14014 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14017 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14021 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14024 goto pool32f_invalid
;
14029 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14032 goto pool32f_invalid
;
14035 #define FINSN_3ARG_SDPS(prfx) \
14036 switch ((ctx->opcode >> 8) & 0x3) { \
14038 mips32_op = OPC_##prfx##_S; \
14041 mips32_op = OPC_##prfx##_D; \
14043 case FMT_SDPS_PS: \
14045 mips32_op = OPC_##prfx##_PS; \
14048 goto pool32f_invalid; \
14051 check_insn(ctx
, ISA_MIPS32R6
);
14052 switch ((ctx
->opcode
>> 9) & 0x3) {
14054 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14057 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14060 goto pool32f_invalid
;
14064 check_insn(ctx
, ISA_MIPS32R6
);
14065 switch ((ctx
->opcode
>> 9) & 0x3) {
14067 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14070 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14073 goto pool32f_invalid
;
14077 /* regular FP ops */
14078 switch ((ctx
->opcode
>> 6) & 0x3) {
14080 FINSN_3ARG_SDPS(ADD
);
14083 FINSN_3ARG_SDPS(SUB
);
14086 FINSN_3ARG_SDPS(MUL
);
14089 fmt
= (ctx
->opcode
>> 8) & 0x3;
14091 mips32_op
= OPC_DIV_D
;
14092 } else if (fmt
== 0) {
14093 mips32_op
= OPC_DIV_S
;
14095 goto pool32f_invalid
;
14099 goto pool32f_invalid
;
14104 switch ((ctx
->opcode
>> 6) & 0x7) {
14105 case MOVN_FMT
: /* SELNEZ_FMT */
14106 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14108 switch ((ctx
->opcode
>> 9) & 0x3) {
14110 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14113 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14116 goto pool32f_invalid
;
14120 FINSN_3ARG_SDPS(MOVN
);
14124 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14125 FINSN_3ARG_SDPS(MOVN
);
14127 case MOVZ_FMT
: /* SELEQZ_FMT */
14128 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14130 switch ((ctx
->opcode
>> 9) & 0x3) {
14132 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14135 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14138 goto pool32f_invalid
;
14142 FINSN_3ARG_SDPS(MOVZ
);
14146 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14147 FINSN_3ARG_SDPS(MOVZ
);
14150 check_insn(ctx
, ISA_MIPS32R6
);
14151 switch ((ctx
->opcode
>> 9) & 0x3) {
14153 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14156 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14159 goto pool32f_invalid
;
14163 check_insn(ctx
, ISA_MIPS32R6
);
14164 switch ((ctx
->opcode
>> 9) & 0x3) {
14166 mips32_op
= OPC_MADDF_S
;
14169 mips32_op
= OPC_MADDF_D
;
14172 goto pool32f_invalid
;
14176 check_insn(ctx
, ISA_MIPS32R6
);
14177 switch ((ctx
->opcode
>> 9) & 0x3) {
14179 mips32_op
= OPC_MSUBF_S
;
14182 mips32_op
= OPC_MSUBF_D
;
14185 goto pool32f_invalid
;
14189 goto pool32f_invalid
;
14193 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
        default:
        pool32f_invalid:
            MIPS_INVAL("pool32f");
            generate_exception_end(ctx, EXCP_RI);
            break;
14202 generate_exception_err(ctx
, EXCP_CpU
, 1);
14206 minor
= (ctx
->opcode
>> 21) & 0x1f;
14209 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14210 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14214 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14215 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14218 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14219 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14220 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14223 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14224 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14228 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14229 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14232 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14233 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14234 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14237 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14238 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14241 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14242 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14246 case TLTI
: /* BC1EQZC */
14247 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14249 check_cp1_enabled(ctx
);
14250 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14253 mips32_op
= OPC_TLTI
;
14257 case TGEI
: /* BC1NEZC */
14258 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14260 check_cp1_enabled(ctx
);
14261 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14264 mips32_op
= OPC_TGEI
;
14269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14270 mips32_op
= OPC_TLTIU
;
14273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14274 mips32_op
= OPC_TGEIU
;
14276 case TNEI
: /* SYNCI */
14277 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
14284 mips32_op
= OPC_TNEI
;
14289 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14290 mips32_op
= OPC_TEQI
;
14292 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14297 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14298 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14299 4, rs
, 0, imm
<< 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
14305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14306 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14309 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
14316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14317 /* COP2: Not implemented. */
14318 generate_exception_err(ctx
, EXCP_CpU
, 2);
14321 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14322 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14325 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14326 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14329 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14330 mips32_op
= OPC_BC1FANY4
;
14333 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14334 mips32_op
= OPC_BC1TANY4
;
14337 check_insn(ctx
, ASE_MIPS3D
);
14340 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14341 check_cp1_enabled(ctx
);
14342 gen_compute_branch1(ctx
, mips32_op
,
14343 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14345 generate_exception_err(ctx
, EXCP_CpU
, 1);
14350 /* MIPS DSP: not implemented */
            MIPS_INVAL("pool32i");
            generate_exception_end(ctx, EXCP_RI);
14359 minor
= (ctx
->opcode
>> 12) & 0xf;
14360 offset
= sextract32(ctx
->opcode
, 0,
14361 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14364 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14365 mips32_op
= OPC_LWL
;
14368 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14369 mips32_op
= OPC_SWL
;
14372 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14373 mips32_op
= OPC_LWR
;
14376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14377 mips32_op
= OPC_SWR
;
14379 #if defined(TARGET_MIPS64)
14381 check_insn(ctx
, ISA_MIPS3
);
14382 check_mips_64(ctx
);
14383 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14384 mips32_op
= OPC_LDL
;
14387 check_insn(ctx
, ISA_MIPS3
);
14388 check_mips_64(ctx
);
14389 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14390 mips32_op
= OPC_SDL
;
14393 check_insn(ctx
, ISA_MIPS3
);
14394 check_mips_64(ctx
);
14395 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14396 mips32_op
= OPC_LDR
;
14399 check_insn(ctx
, ISA_MIPS3
);
14400 check_mips_64(ctx
);
14401 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14402 mips32_op
= OPC_SDR
;
14405 check_insn(ctx
, ISA_MIPS3
);
14406 check_mips_64(ctx
);
14407 mips32_op
= OPC_LWU
;
14410 check_insn(ctx
, ISA_MIPS3
);
14411 check_mips_64(ctx
);
14412 mips32_op
= OPC_LLD
;
14416 mips32_op
= OPC_LL
;
14419 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14422 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14425 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14427 #if defined(TARGET_MIPS64)
14429 check_insn(ctx
, ISA_MIPS3
);
14430 check_mips_64(ctx
);
14431 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14435 /* Treat as no-op */
14436 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14437 /* hint codes 24-31 are reserved and signal RI */
14438 generate_exception(ctx
, EXCP_RI
);
            MIPS_INVAL("pool32c");
            generate_exception_end(ctx, EXCP_RI);
14447 case ADDI32
: /* AUI, LUI */
14448 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14450 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14453 mips32_op
= OPC_ADDI
;
14458 mips32_op
= OPC_ADDIU
;
14460 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14463 /* Logical operations */
14465 mips32_op
= OPC_ORI
;
14468 mips32_op
= OPC_XORI
;
14471 mips32_op
= OPC_ANDI
;
14473 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14476 /* Set less than immediate */
14478 mips32_op
= OPC_SLTI
;
14481 mips32_op
= OPC_SLTIU
;
14483 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14486 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14487 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14488 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14489 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14491 case JALS32
: /* BOVC, BEQC, BEQZALC */
14492 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14495 mips32_op
= OPC_BOVC
;
14496 } else if (rs
< rt
&& rs
== 0) {
14498 mips32_op
= OPC_BEQZALC
;
14501 mips32_op
= OPC_BEQC
;
14503 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14506 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14507 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14508 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14511 case BEQ32
: /* BC */
14512 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14514 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14515 sextract32(ctx
->opcode
<< 1, 0, 27));
14518 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14521 case BNE32
: /* BALC */
14522 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14524 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14525 sextract32(ctx
->opcode
<< 1, 0, 27));
14528 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14531 case J32
: /* BGTZC, BLTZC, BLTC */
14532 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14533 if (rs
== 0 && rt
!= 0) {
14535 mips32_op
= OPC_BGTZC
;
14536 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14538 mips32_op
= OPC_BLTZC
;
14541 mips32_op
= OPC_BLTC
;
14543 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14546 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14547 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14550 case JAL32
: /* BLEZC, BGEZC, BGEC */
14551 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14552 if (rs
== 0 && rt
!= 0) {
14554 mips32_op
= OPC_BLEZC
;
14555 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14557 mips32_op
= OPC_BGEZC
;
14560 mips32_op
= OPC_BGEC
;
14562 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14565 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14566 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14567 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14570 /* Floating point (COP1) */
14572 mips32_op
= OPC_LWC1
;
14575 mips32_op
= OPC_LDC1
;
14578 mips32_op
= OPC_SWC1
;
14581 mips32_op
= OPC_SDC1
;
14583 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14585 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14586 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14587 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14588 switch ((ctx
->opcode
>> 16) & 0x1f) {
14589 case ADDIUPC_00
... ADDIUPC_07
:
14590 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14593 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14596 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14598 case LWPC_08
... LWPC_0F
:
14599 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14602 generate_exception(ctx
, EXCP_RI
);
14607 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14608 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14610 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14613 case BNVC
: /* BNEC, BNEZALC */
14614 check_insn(ctx
, ISA_MIPS32R6
);
14617 mips32_op
= OPC_BNVC
;
14618 } else if (rs
< rt
&& rs
== 0) {
14620 mips32_op
= OPC_BNEZALC
;
14623 mips32_op
= OPC_BNEC
;
14625 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14627 case R6_BNEZC
: /* JIALC */
14628 check_insn(ctx
, ISA_MIPS32R6
);
14631 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14632 sextract32(ctx
->opcode
<< 1, 0, 22));
14635 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14638 case R6_BEQZC
: /* JIC */
14639 check_insn(ctx
, ISA_MIPS32R6
);
14642 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14643 sextract32(ctx
->opcode
<< 1, 0, 22));
14646 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14649 case BLEZALC
: /* BGEZALC, BGEUC */
14650 check_insn(ctx
, ISA_MIPS32R6
);
14651 if (rs
== 0 && rt
!= 0) {
14653 mips32_op
= OPC_BLEZALC
;
14654 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14656 mips32_op
= OPC_BGEZALC
;
14659 mips32_op
= OPC_BGEUC
;
14661 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14663 case BGTZALC
: /* BLTZALC, BLTUC */
14664 check_insn(ctx
, ISA_MIPS32R6
);
14665 if (rs
== 0 && rt
!= 0) {
14667 mips32_op
= OPC_BGTZALC
;
14668 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14670 mips32_op
= OPC_BLTZALC
;
14673 mips32_op
= OPC_BLTUC
;
14675 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14677 /* Loads and stores */
14679 mips32_op
= OPC_LB
;
14682 mips32_op
= OPC_LBU
;
14685 mips32_op
= OPC_LH
;
14688 mips32_op
= OPC_LHU
;
14691 mips32_op
= OPC_LW
;
14693 #ifdef TARGET_MIPS64
14695 check_insn(ctx
, ISA_MIPS3
);
14696 check_mips_64(ctx
);
14697 mips32_op
= OPC_LD
;
14700 check_insn(ctx
, ISA_MIPS3
);
14701 check_mips_64(ctx
);
14702 mips32_op
= OPC_SD
;
14706 mips32_op
= OPC_SB
;
14709 mips32_op
= OPC_SH
;
14712 mips32_op
= OPC_SW
;
    do_ld:
        gen_ld(ctx, mips32_op, rt, rs, imm);
        break;
    do_st:
        gen_st(ctx, mips32_op, rt, rs, imm);
        break;
    default:
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
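
/* Top-level microMIPS decoder: checks halfword alignment, enforces the
   delay-slot size restrictions, handles the 16-bit encodings directly and
   defers 32-bit encodings to decode_micromips32_opc().  Returns the size
   of the decoded instruction in bytes. */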
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        case 0:
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        case 4:
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        case 5:
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        case 6:
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        case 7:
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
            }
            break;
        case 1:
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        case 2:
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        case 3:
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
            }
            break;
        }
    }
14773 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14774 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14775 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14778 switch (ctx
->opcode
& 0x1) {
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* In the Release 6 the register number location in
                 * the instruction encoding has changed.
                 */
                gen_arith(ctx, opc, rs1, rd, rs2);
            } else {
                gen_arith(ctx, opc, rd, rs1, rs2);
            }
14798 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14799 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14800 int amount
= (ctx
->opcode
>> 1) & 0x7;
14802 amount
= amount
== 0 ? 8 : amount
;
14804 switch (ctx
->opcode
& 0x1) {
14813 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14817 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14818 gen_pool16c_r6_insn(ctx
);
14820 gen_pool16c_insn(ctx
);
14825 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14826 int rb
= 28; /* GP */
14827 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14829 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14834 if (ctx
->opcode
& 1) {
14835 generate_exception_end(ctx
, EXCP_RI
);
14838 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14839 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14840 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14841 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14846 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14847 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14848 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14849 offset
= (offset
== 0xf ? -1 : offset
);
14851 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14856 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14857 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14858 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14860 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14865 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14866 int rb
= 29; /* SP */
14867 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14869 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14874 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14875 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14876 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14878 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14883 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14884 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14885 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14887 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14892 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14893 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14894 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14896 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14901 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14902 int rb
= 29; /* SP */
14903 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14905 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14910 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14911 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14912 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14914 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14919 int rd
= uMIPS_RD5(ctx
->opcode
);
14920 int rs
= uMIPS_RS5(ctx
->opcode
);
14922 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14929 switch (ctx
->opcode
& 0x1) {
14939 switch (ctx
->opcode
& 0x1) {
14944 gen_addiur1sp(ctx
);
14948 case B16
: /* BC16 */
14949 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14950 sextract32(ctx
->opcode
, 0, 10) << 1,
14951 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14953 case BNEZ16
: /* BNEZC16 */
14954 case BEQZ16
: /* BEQZC16 */
14955 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14956 mmreg(uMIPS_RD(ctx
->opcode
)),
14957 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14958 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14963 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
14964 int imm
= ZIMM(ctx
->opcode
, 0, 7);
14966 imm
= (imm
== 0x7f ? -1 : imm
);
14967 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
14973 generate_exception_end(ctx
, EXCP_RI
);
14976 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

#endif

/* MIPSDSP functions. */
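
/* Indexed DSP loads: the effective address is GPR[base] + GPR[offset],
   with register 0 reading as zero, and the loaded value is written to rd. */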
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
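
/* DSP arithmetic (absolute value, saturating add/sub and the
   precision-expanding PRECEQ/PRECEU forms).  A destination of register 0
   is treated as a NOP. */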
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
15054 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15056 case OPC_ADDUH_R_QB
:
15057 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15060 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15062 case OPC_ADDQH_R_PH
:
15063 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15066 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15068 case OPC_ADDQH_R_W
:
15069 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15072 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15074 case OPC_SUBUH_R_QB
:
15075 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15078 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15080 case OPC_SUBQH_R_PH
:
15081 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15084 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15086 case OPC_SUBQH_R_W
:
15087 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15091 case OPC_ABSQ_S_PH_DSP
:
15093 case OPC_ABSQ_S_QB
:
15095 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15097 case OPC_ABSQ_S_PH
:
15099 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15103 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15105 case OPC_PRECEQ_W_PHL
:
15107 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15108 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15110 case OPC_PRECEQ_W_PHR
:
15112 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15113 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15114 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15116 case OPC_PRECEQU_PH_QBL
:
15118 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15120 case OPC_PRECEQU_PH_QBR
:
15122 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15124 case OPC_PRECEQU_PH_QBLA
:
15126 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15128 case OPC_PRECEQU_PH_QBRA
:
15130 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15132 case OPC_PRECEU_PH_QBL
:
15134 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15136 case OPC_PRECEU_PH_QBR
:
15138 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15140 case OPC_PRECEU_PH_QBLA
:
15142 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15144 case OPC_PRECEU_PH_QBRA
:
15146 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15150 case OPC_ADDU_QB_DSP
:
15154 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15156 case OPC_ADDQ_S_PH
:
15158 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15162 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15166 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15168 case OPC_ADDU_S_QB
:
15170 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15174 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15176 case OPC_ADDU_S_PH
:
15178 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15182 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15184 case OPC_SUBQ_S_PH
:
15186 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15190 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15194 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15196 case OPC_SUBU_S_QB
:
15198 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15202 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15204 case OPC_SUBU_S_PH
:
15206 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15210 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15214 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15218 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15220 case OPC_RADDU_W_QB
:
15222 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15226 case OPC_CMPU_EQ_QB_DSP
:
15228 case OPC_PRECR_QB_PH
:
15230 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15232 case OPC_PRECRQ_QB_PH
:
15234 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15236 case OPC_PRECR_SRA_PH_W
:
15239 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15240 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15242 tcg_temp_free_i32(sa_t
);
15245 case OPC_PRECR_SRA_R_PH_W
:
15248 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15249 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15251 tcg_temp_free_i32(sa_t
);
15254 case OPC_PRECRQ_PH_W
:
15256 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15258 case OPC_PRECRQ_RS_PH_W
:
15260 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15262 case OPC_PRECRQU_S_QB_PH
:
15264 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15268 #ifdef TARGET_MIPS64
15269 case OPC_ABSQ_S_QH_DSP
:
15271 case OPC_PRECEQ_L_PWL
:
15273 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15275 case OPC_PRECEQ_L_PWR
:
15277 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15279 case OPC_PRECEQ_PW_QHL
:
15281 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15283 case OPC_PRECEQ_PW_QHR
:
15285 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15287 case OPC_PRECEQ_PW_QHLA
:
15289 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15291 case OPC_PRECEQ_PW_QHRA
:
15293 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15295 case OPC_PRECEQU_QH_OBL
:
15297 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15299 case OPC_PRECEQU_QH_OBR
:
15301 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15303 case OPC_PRECEQU_QH_OBLA
:
15305 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15307 case OPC_PRECEQU_QH_OBRA
:
15309 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15311 case OPC_PRECEU_QH_OBL
:
15313 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15315 case OPC_PRECEU_QH_OBR
:
15317 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15319 case OPC_PRECEU_QH_OBLA
:
15321 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15323 case OPC_PRECEU_QH_OBRA
:
15325 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15327 case OPC_ABSQ_S_OB
:
15329 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15331 case OPC_ABSQ_S_PW
:
15333 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15335 case OPC_ABSQ_S_QH
:
15337 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15341 case OPC_ADDU_OB_DSP
:
15343 case OPC_RADDU_L_OB
:
15345 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15349 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15351 case OPC_SUBQ_S_PW
:
15353 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15357 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15359 case OPC_SUBQ_S_QH
:
15361 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15365 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15367 case OPC_SUBU_S_OB
:
15369 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15373 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15375 case OPC_SUBU_S_QH
:
15377 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15381 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15383 case OPC_SUBUH_R_OB
:
15385 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15389 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15391 case OPC_ADDQ_S_PW
:
15393 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15397 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15399 case OPC_ADDQ_S_QH
:
15401 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15405 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15407 case OPC_ADDU_S_OB
:
15409 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15413 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15415 case OPC_ADDU_S_QH
:
15417 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15421 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15423 case OPC_ADDUH_R_OB
:
15425 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15429 case OPC_CMPU_EQ_OB_DSP
:
15431 case OPC_PRECR_OB_QH
:
15433 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15435 case OPC_PRECR_SRA_QH_PW
:
15438 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15439 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15440 tcg_temp_free_i32(ret_t
);
15443 case OPC_PRECR_SRA_R_QH_PW
:
15446 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15447 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15448 tcg_temp_free_i32(sa_v
);
15451 case OPC_PRECRQ_OB_QH
:
15453 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15455 case OPC_PRECRQ_PW_L
:
15457 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15459 case OPC_PRECRQ_QH_PW
:
15461 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15463 case OPC_PRECRQ_RS_QH_PW
:
15465 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15467 case OPC_PRECRQU_S_OB_QH
:
15469 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}
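
/* DSP shifts.  The shift amount is either the immediate v1 (moved into t0)
   or, for the variable (-V) forms, the value of GPR[v1]; register 0 as the
   destination is treated as a NOP. */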
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
    uint32_t op2;
    TCGv t0;
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15502 case OPC_SHLL_QB_DSP
:
15504 op2
= MASK_SHLL_QB(ctx
->opcode
);
15508 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15512 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15516 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15520 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15522 case OPC_SHLL_S_PH
:
15524 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15526 case OPC_SHLLV_S_PH
:
15528 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15532 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15534 case OPC_SHLLV_S_W
:
15536 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15540 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15544 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15548 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15552 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15556 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15558 case OPC_SHRA_R_QB
:
15560 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15564 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15566 case OPC_SHRAV_R_QB
:
15568 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15572 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15574 case OPC_SHRA_R_PH
:
15576 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15580 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15582 case OPC_SHRAV_R_PH
:
15584 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15588 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15590 case OPC_SHRAV_R_W
:
15592 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
            default:            /* Invalid */
                MIPS_INVAL("MASK SHLL.QB");
                generate_exception_end(ctx, EXCP_RI);
15601 #ifdef TARGET_MIPS64
15602 case OPC_SHLL_OB_DSP
:
15603 op2
= MASK_SHLL_OB(ctx
->opcode
);
15607 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15611 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15613 case OPC_SHLL_S_PW
:
15615 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15617 case OPC_SHLLV_S_PW
:
15619 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15623 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15627 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15631 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15635 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15637 case OPC_SHLL_S_QH
:
15639 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15641 case OPC_SHLLV_S_QH
:
15643 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15647 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15651 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15653 case OPC_SHRA_R_OB
:
15655 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15657 case OPC_SHRAV_R_OB
:
15659 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15663 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15667 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15669 case OPC_SHRA_R_PW
:
15671 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15673 case OPC_SHRAV_R_PW
:
15675 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15679 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15683 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15685 case OPC_SHRA_R_QH
:
15687 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15689 case OPC_SHRAV_R_QH
:
15691 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15695 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15699 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15703 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15707 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
        default:            /* Invalid */
            MIPS_INVAL("MASK SHLL.OB");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    }

    tcg_temp_free(t0);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}
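
/* DSP multiply and dot-product ops.  Accumulator-targeting forms pass the
   accumulator index to the helpers through t0; for GPR-targeting forms with
   check_ret set, a destination of register 0 is treated as a NOP. */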
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    TCGv_i32 t0;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
15750 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15753 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15756 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15758 case OPC_MULQ_RS_W
:
15759 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15763 case OPC_DPA_W_PH_DSP
:
15765 case OPC_DPAU_H_QBL
:
15767 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15769 case OPC_DPAU_H_QBR
:
15771 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15773 case OPC_DPSU_H_QBL
:
15775 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15777 case OPC_DPSU_H_QBR
:
15779 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15783 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15785 case OPC_DPAX_W_PH
:
15787 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15789 case OPC_DPAQ_S_W_PH
:
15791 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15793 case OPC_DPAQX_S_W_PH
:
15795 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15797 case OPC_DPAQX_SA_W_PH
:
15799 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15803 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15805 case OPC_DPSX_W_PH
:
15807 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15809 case OPC_DPSQ_S_W_PH
:
15811 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15813 case OPC_DPSQX_S_W_PH
:
15815 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15817 case OPC_DPSQX_SA_W_PH
:
15819 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15821 case OPC_MULSAQ_S_W_PH
:
15823 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15825 case OPC_DPAQ_SA_L_W
:
15827 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15829 case OPC_DPSQ_SA_L_W
:
15831 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15833 case OPC_MAQ_S_W_PHL
:
15835 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15837 case OPC_MAQ_S_W_PHR
:
15839 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15841 case OPC_MAQ_SA_W_PHL
:
15843 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15845 case OPC_MAQ_SA_W_PHR
:
15847 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15849 case OPC_MULSA_W_PH
:
15851 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15855 #ifdef TARGET_MIPS64
15856 case OPC_DPAQ_W_QH_DSP
:
15858 int ac
= ret
& 0x03;
15859 tcg_gen_movi_i32(t0
, ac
);
15864 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15868 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15872 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15876 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15880 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15882 case OPC_DPAQ_S_W_QH
:
15884 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15886 case OPC_DPAQ_SA_L_PW
:
15888 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15890 case OPC_DPAU_H_OBL
:
15892 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15894 case OPC_DPAU_H_OBR
:
15896 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15900 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15902 case OPC_DPSQ_S_W_QH
:
15904 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15906 case OPC_DPSQ_SA_L_PW
:
15908 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15910 case OPC_DPSU_H_OBL
:
15912 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15914 case OPC_DPSU_H_OBR
:
15916 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15918 case OPC_MAQ_S_L_PWL
:
15920 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15922 case OPC_MAQ_S_L_PWR
:
15924 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15926 case OPC_MAQ_S_W_QHLL
:
15928 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15930 case OPC_MAQ_SA_W_QHLL
:
15932 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15934 case OPC_MAQ_S_W_QHLR
:
15936 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15938 case OPC_MAQ_SA_W_QHLR
:
15940 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15942 case OPC_MAQ_S_W_QHRL
:
15944 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15946 case OPC_MAQ_SA_W_QHRL
:
15948 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15950 case OPC_MAQ_S_W_QHRR
:
15952 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15954 case OPC_MAQ_SA_W_QHRR
:
15956 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15958 case OPC_MULSAQ_S_L_PW
:
15960 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15962 case OPC_MULSAQ_S_W_QH
:
15964 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15970 case OPC_ADDU_QB_DSP
:
15972 case OPC_MULEU_S_PH_QBL
:
15974 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15976 case OPC_MULEU_S_PH_QBR
:
15978 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15980 case OPC_MULQ_RS_PH
:
15982 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15984 case OPC_MULEQ_S_W_PHL
:
15986 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15988 case OPC_MULEQ_S_W_PHR
:
15990 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15992 case OPC_MULQ_S_PH
:
15994 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15998 #ifdef TARGET_MIPS64
15999 case OPC_ADDU_OB_DSP
:
16001 case OPC_MULEQ_S_PW_QHL
:
16003 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16005 case OPC_MULEQ_S_PW_QHR
:
16007 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16009 case OPC_MULEU_S_QH_OBL
:
16011 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16013 case OPC_MULEU_S_QH_OBR
:
16015 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16017 case OPC_MULQ_RS_QH
:
16019 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
    tcg_temp_free_i32(t0);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}
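
/* DSP bit/field instructions (BITREV and the REPL/REPLV replicate forms).
   A destination of register 0 is treated as a NOP. */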
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
    TCGv t0;
    TCGv val_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
16048 case OPC_ABSQ_S_PH_DSP
:
16052 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16057 target_long result
;
16058 imm
= (ctx
->opcode
>> 16) & 0xFF;
16059 result
= (uint32_t)imm
<< 24 |
16060 (uint32_t)imm
<< 16 |
16061 (uint32_t)imm
<< 8 |
16063 result
= (int32_t)result
;
16064 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16069 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16070 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16071 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16072 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16073 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16074 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16079 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16080 imm
= (int16_t)(imm
<< 6) >> 6;
16081 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16082 (target_long
)((int32_t)imm
<< 16 | \
16088 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16089 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16090 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16091 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16095 #ifdef TARGET_MIPS64
16096 case OPC_ABSQ_S_QH_DSP
:
16103 imm
= (ctx
->opcode
>> 16) & 0xFF;
16104 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16105 temp
= (temp
<< 16) | temp
;
16106 temp
= (temp
<< 32) | temp
;
16107 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16115 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16116 imm
= (int16_t)(imm
<< 6) >> 6;
16117 temp
= ((target_long
)imm
<< 32) \
16118 | ((target_long
)imm
& 0xFFFFFFFF);
16119 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16127 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16128 imm
= (int16_t)(imm
<< 6) >> 6;
16130 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16131 ((uint64_t)(uint16_t)imm
<< 32) |
16132 ((uint64_t)(uint16_t)imm
<< 16) |
16133 (uint64_t)(uint16_t)imm
;
16134 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16139 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16140 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16141 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16142 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16143 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16144 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16145 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16149 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16150 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16151 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16155 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16156 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16157 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16158 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16159 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
    tcg_temp_free(t0);
    tcg_temp_free(val_t);
}
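
/* DSP compare, pick and pack ops.  The CMP forms only update the DSPControl
   condition bits; the CMPGU, CMPGDU, PICK and PACKRL forms write a GPR, so
   the register-0 NOP check is applied only when check_ret is set. */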
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
{
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16190 case OPC_CMPU_EQ_QB_DSP
:
16192 case OPC_CMPU_EQ_QB
:
16194 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16196 case OPC_CMPU_LT_QB
:
16198 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16200 case OPC_CMPU_LE_QB
:
16202 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16204 case OPC_CMPGU_EQ_QB
:
16206 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16208 case OPC_CMPGU_LT_QB
:
16210 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16212 case OPC_CMPGU_LE_QB
:
16214 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16216 case OPC_CMPGDU_EQ_QB
:
16218 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16219 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16220 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16221 tcg_gen_shli_tl(t1
, t1
, 24);
16222 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16224 case OPC_CMPGDU_LT_QB
:
16226 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16227 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16228 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16229 tcg_gen_shli_tl(t1
, t1
, 24);
16230 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16232 case OPC_CMPGDU_LE_QB
:
16234 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16235 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16236 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16237 tcg_gen_shli_tl(t1
, t1
, 24);
16238 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16240 case OPC_CMP_EQ_PH
:
16242 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16244 case OPC_CMP_LT_PH
:
16246 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16248 case OPC_CMP_LE_PH
:
16250 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16254 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16258 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16260 case OPC_PACKRL_PH
:
16262 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16266 #ifdef TARGET_MIPS64
16267 case OPC_CMPU_EQ_OB_DSP
:
16269 case OPC_CMP_EQ_PW
:
16271 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16273 case OPC_CMP_LT_PW
:
16275 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16277 case OPC_CMP_LE_PW
:
16279 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16281 case OPC_CMP_EQ_QH
:
16283 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16285 case OPC_CMP_LT_QH
:
16287 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16289 case OPC_CMP_LE_QH
:
16291 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16293 case OPC_CMPGDU_EQ_OB
:
16295 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16297 case OPC_CMPGDU_LT_OB
:
16299 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16301 case OPC_CMPGDU_LE_OB
:
16303 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16305 case OPC_CMPGU_EQ_OB
:
16307 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16309 case OPC_CMPGU_LT_OB
:
16311 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16313 case OPC_CMPGU_LE_OB
:
16315 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16317 case OPC_CMPU_EQ_OB
:
16319 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16321 case OPC_CMPU_LT_OB
:
16323 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16325 case OPC_CMPU_LE_OB
:
16327 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16329 case OPC_PACKRL_PW
:
16331 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16335 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16339 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16343 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
    tcg_temp_free(t1);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}
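
/* APPEND/PREPEND/BALIGN (and the 64-bit DAPPEND group): shift GPR[rt] and
   insert bits taken from GPR[rs], controlled by the sa field. */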
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
    TCGv t0;

    if (rt == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    switch (op1) {
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
16375 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16377 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16381 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16382 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16383 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16384 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16386 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16390 if (sa
!= 0 && sa
!= 2) {
16391 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16392 tcg_gen_ext32u_tl(t0
, t0
);
16393 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16394 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16396 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
        default:            /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
16404 #ifdef TARGET_MIPS64
16405 case OPC_DAPPEND_DSP
:
16406 switch (MASK_DAPPEND(ctx
->opcode
)) {
16409 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16413 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16414 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16415 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16419 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16420 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16421 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16426 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16427 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16428 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16429 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
        default:            /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    }
    tcg_temp_free(t0);
}
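
/* DSP accumulator access ops (EXTR/EXTP/SHILO/MTHLIP and WRDSP/RDDSP):
   these move data between the HI/LO accumulators, DSPControl and the GPRs. */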
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
{
    TCGv t0;
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;
    int16_t imm;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16467 case OPC_EXTR_W_DSP
:
16471 tcg_gen_movi_tl(t0
, v2
);
16472 tcg_gen_movi_tl(t1
, v1
);
16473 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16476 tcg_gen_movi_tl(t0
, v2
);
16477 tcg_gen_movi_tl(t1
, v1
);
16478 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16480 case OPC_EXTR_RS_W
:
16481 tcg_gen_movi_tl(t0
, v2
);
16482 tcg_gen_movi_tl(t1
, v1
);
16483 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16486 tcg_gen_movi_tl(t0
, v2
);
16487 tcg_gen_movi_tl(t1
, v1
);
16488 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16490 case OPC_EXTRV_S_H
:
16491 tcg_gen_movi_tl(t0
, v2
);
16492 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16495 tcg_gen_movi_tl(t0
, v2
);
16496 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16498 case OPC_EXTRV_R_W
:
16499 tcg_gen_movi_tl(t0
, v2
);
16500 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16502 case OPC_EXTRV_RS_W
:
16503 tcg_gen_movi_tl(t0
, v2
);
16504 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16507 tcg_gen_movi_tl(t0
, v2
);
16508 tcg_gen_movi_tl(t1
, v1
);
16509 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16512 tcg_gen_movi_tl(t0
, v2
);
16513 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16516 tcg_gen_movi_tl(t0
, v2
);
16517 tcg_gen_movi_tl(t1
, v1
);
16518 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16521 tcg_gen_movi_tl(t0
, v2
);
16522 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16525 imm
= (ctx
->opcode
>> 20) & 0x3F;
16526 tcg_gen_movi_tl(t0
, ret
);
16527 tcg_gen_movi_tl(t1
, imm
);
16528 gen_helper_shilo(t0
, t1
, cpu_env
);
16531 tcg_gen_movi_tl(t0
, ret
);
16532 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16535 tcg_gen_movi_tl(t0
, ret
);
16536 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16539 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16540 tcg_gen_movi_tl(t0
, imm
);
16541 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16544 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16545 tcg_gen_movi_tl(t0
, imm
);
16546 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16550 #ifdef TARGET_MIPS64
16551 case OPC_DEXTR_W_DSP
:
16555 tcg_gen_movi_tl(t0
, ret
);
16556 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16560 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16561 int ac
= (ctx
->opcode
>> 11) & 0x03;
16562 tcg_gen_movi_tl(t0
, shift
);
16563 tcg_gen_movi_tl(t1
, ac
);
16564 gen_helper_dshilo(t0
, t1
, cpu_env
);
16569 int ac
= (ctx
->opcode
>> 11) & 0x03;
16570 tcg_gen_movi_tl(t0
, ac
);
16571 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16575 tcg_gen_movi_tl(t0
, v2
);
16576 tcg_gen_movi_tl(t1
, v1
);
16578 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16581 tcg_gen_movi_tl(t0
, v2
);
16582 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16585 tcg_gen_movi_tl(t0
, v2
);
16586 tcg_gen_movi_tl(t1
, v1
);
16587 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16590 tcg_gen_movi_tl(t0
, v2
);
16591 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16594 tcg_gen_movi_tl(t0
, v2
);
16595 tcg_gen_movi_tl(t1
, v1
);
16596 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16598 case OPC_DEXTR_R_L
:
16599 tcg_gen_movi_tl(t0
, v2
);
16600 tcg_gen_movi_tl(t1
, v1
);
16601 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16603 case OPC_DEXTR_RS_L
:
16604 tcg_gen_movi_tl(t0
, v2
);
16605 tcg_gen_movi_tl(t1
, v1
);
16606 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16609 tcg_gen_movi_tl(t0
, v2
);
16610 tcg_gen_movi_tl(t1
, v1
);
16611 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16613 case OPC_DEXTR_R_W
:
16614 tcg_gen_movi_tl(t0
, v2
);
16615 tcg_gen_movi_tl(t1
, v1
);
16616 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16618 case OPC_DEXTR_RS_W
:
16619 tcg_gen_movi_tl(t0
, v2
);
16620 tcg_gen_movi_tl(t1
, v1
);
16621 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16623 case OPC_DEXTR_S_H
:
16624 tcg_gen_movi_tl(t0
, v2
);
16625 tcg_gen_movi_tl(t1
, v1
);
16626 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16628 case OPC_DEXTRV_S_H
:
16629 tcg_gen_movi_tl(t0
, v2
);
16630 tcg_gen_movi_tl(t1
, v1
);
16631 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16634 tcg_gen_movi_tl(t0
, v2
);
16635 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16637 case OPC_DEXTRV_R_L
:
16638 tcg_gen_movi_tl(t0
, v2
);
16639 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16641 case OPC_DEXTRV_RS_L
:
16642 tcg_gen_movi_tl(t0
, v2
);
16643 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16646 tcg_gen_movi_tl(t0
, v2
);
16647 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16649 case OPC_DEXTRV_R_W
:
16650 tcg_gen_movi_tl(t0
, v2
);
16651 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16653 case OPC_DEXTRV_RS_W
:
16654 tcg_gen_movi_tl(t0
, v2
);
16655 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}

/* End MIPSDSP functions. */
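/*
 * Illustrative sketch (assumption: standalone helper, not used by the
 * decoders below): every SPECIAL/SPECIAL2/SPECIAL3 decoder that follows
 * starts by cracking the same R-type fields out of the 32-bit opcode:
 *   31..26 major opcode, 25..21 rs, 20..16 rt, 15..11 rd, 10..6 sa,
 *   5..0 function.
 */
static inline void decode_rtype_fields_sketch(uint32_t insn, int *rs, int *rt,
                                              int *rd, int *sa)
{
    *rs = (insn >> 21) & 0x1f;
    *rt = (insn >> 16) & 0x1f;
    *rd = (insn >> 11) & 0x1f;
    *sa = (insn >> 6) & 0x1f;
}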
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
16683 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16685 case OPC_MULT
... OPC_DIVU
:
16686 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16696 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16699 MIPS_INVAL("special_r6 muldiv");
16700 generate_exception_end(ctx
, EXCP_RI
);
16706 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16710 if (rt
== 0 && sa
== 1) {
16711 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16712 We need additionally to check other fields */
16713 gen_cl(ctx
, op1
, rd
, rs
);
16715 generate_exception_end(ctx
, EXCP_RI
);
16719 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16720 gen_helper_do_semihosting(cpu_env
);
16722 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16723 generate_exception_end(ctx
, EXCP_RI
);
16725 generate_exception_end(ctx
, EXCP_DBp
);
16729 #if defined(TARGET_MIPS64)
16731 check_mips_64(ctx
);
16732 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16736 if (rt
== 0 && sa
== 1) {
16737 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16738 We need additionally to check other fields */
16739 check_mips_64(ctx
);
16740 gen_cl(ctx
, op1
, rd
, rs
);
16742 generate_exception_end(ctx
, EXCP_RI
);
16745 case OPC_DMULT
... OPC_DDIVU
:
16746 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16756 check_mips_64(ctx
);
16757 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16760 MIPS_INVAL("special_r6 muldiv");
16761 generate_exception_end(ctx
, EXCP_RI
);
16766 default: /* Invalid */
16767 MIPS_INVAL("special_r6");
16768 generate_exception_end(ctx
, EXCP_RI
);
16773 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16775 int rs
, rt
, rd
, sa
;
16778 rs
= (ctx
->opcode
>> 21) & 0x1f;
16779 rt
= (ctx
->opcode
>> 16) & 0x1f;
16780 rd
= (ctx
->opcode
>> 11) & 0x1f;
16781 sa
= (ctx
->opcode
>> 6) & 0x1f;
16783 op1
= MASK_SPECIAL(ctx
->opcode
);
16785 case OPC_MOVN
: /* Conditional move */
16787 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16788 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16789 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16791 case OPC_MFHI
: /* Move from HI/LO */
16793 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16796 case OPC_MTLO
: /* Move to HI/LO */
16797 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16800 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16801 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16802 check_cp1_enabled(ctx
);
16803 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16804 (ctx
->opcode
>> 16) & 1);
16806 generate_exception_err(ctx
, EXCP_CpU
, 1);
16812 check_insn(ctx
, INSN_VR54XX
);
16813 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16814 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16816 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16821 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16823 #if defined(TARGET_MIPS64)
16824 case OPC_DMULT
... OPC_DDIVU
:
16825 check_insn(ctx
, ISA_MIPS3
);
16826 check_mips_64(ctx
);
16827 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16831 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16834 #ifdef MIPS_STRICT_STANDARD
16835 MIPS_INVAL("SPIM");
16836 generate_exception_end(ctx
, EXCP_RI
);
16838 /* Implemented as RI exception for now. */
16839 MIPS_INVAL("spim (unofficial)");
16840 generate_exception_end(ctx
, EXCP_RI
);
16843 default: /* Invalid */
16844 MIPS_INVAL("special_legacy");
16845 generate_exception_end(ctx
, EXCP_RI
);
16850 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16852 int rs
, rt
, rd
, sa
;
16855 rs
= (ctx
->opcode
>> 21) & 0x1f;
16856 rt
= (ctx
->opcode
>> 16) & 0x1f;
16857 rd
= (ctx
->opcode
>> 11) & 0x1f;
16858 sa
= (ctx
->opcode
>> 6) & 0x1f;
16860 op1
= MASK_SPECIAL(ctx
->opcode
);
16862 case OPC_SLL
: /* Shift with immediate */
16863 if (sa
== 5 && rd
== 0 &&
16864 rs
== 0 && rt
== 0) { /* PAUSE */
16865 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16866 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16867 generate_exception_end(ctx
, EXCP_RI
);
16873 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16876 switch ((ctx
->opcode
>> 21) & 0x1f) {
16878 /* rotr is decoded as srl on non-R2 CPUs */
16879 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16884 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16887 generate_exception_end(ctx
, EXCP_RI
);
16891 case OPC_ADD
... OPC_SUBU
:
16892 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16894 case OPC_SLLV
: /* Shifts */
16896 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16899 switch ((ctx
->opcode
>> 6) & 0x1f) {
16901 /* rotrv is decoded as srlv on non-R2 CPUs */
16902 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16907 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16910 generate_exception_end(ctx
, EXCP_RI
);
16914 case OPC_SLT
: /* Set on less than */
16916 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16918 case OPC_AND
: /* Logic*/
16922 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16925 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16927 case OPC_TGE
... OPC_TEQ
: /* Traps */
16929 check_insn(ctx
, ISA_MIPS2
);
16930 gen_trap(ctx
, op1
, rs
, rt
, -1);
16932 case OPC_LSA
: /* OPC_PMON */
16933 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16934 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16935 decode_opc_special_r6(env
, ctx
);
16937 /* Pmon entry point, also R4010 selsl */
16938 #ifdef MIPS_STRICT_STANDARD
16939 MIPS_INVAL("PMON / selsl");
16940 generate_exception_end(ctx
, EXCP_RI
);
16942 gen_helper_0e0i(pmon
, sa
);
16947 generate_exception_end(ctx
, EXCP_SYSCALL
);
16950 generate_exception_end(ctx
, EXCP_BREAK
);
16953 check_insn(ctx
, ISA_MIPS2
);
16954 /* Treat as NOP. */
16957 #if defined(TARGET_MIPS64)
16958 /* MIPS64 specific opcodes */
16963 check_insn(ctx
, ISA_MIPS3
);
16964 check_mips_64(ctx
);
16965 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16968 switch ((ctx
->opcode
>> 21) & 0x1f) {
16970 /* drotr is decoded as dsrl on non-R2 CPUs */
16971 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16976 check_insn(ctx
, ISA_MIPS3
);
16977 check_mips_64(ctx
);
16978 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16981 generate_exception_end(ctx
, EXCP_RI
);
16986 switch ((ctx
->opcode
>> 21) & 0x1f) {
16988 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
16989 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16994 check_insn(ctx
, ISA_MIPS3
);
16995 check_mips_64(ctx
);
16996 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16999 generate_exception_end(ctx
, EXCP_RI
);
17003 case OPC_DADD
... OPC_DSUBU
:
17004 check_insn(ctx
, ISA_MIPS3
);
17005 check_mips_64(ctx
);
17006 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17010 check_insn(ctx
, ISA_MIPS3
);
17011 check_mips_64(ctx
);
17012 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17015 switch ((ctx
->opcode
>> 6) & 0x1f) {
17017 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17018 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17023 check_insn(ctx
, ISA_MIPS3
);
17024 check_mips_64(ctx
);
17025 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17028 generate_exception_end(ctx
, EXCP_RI
);
17033 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17034 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17035 decode_opc_special_r6(env
, ctx
);
17040 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17041 decode_opc_special_r6(env
, ctx
);
17043 decode_opc_special_legacy(env
, ctx
);
17048 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17055 rs
= (ctx
->opcode
>> 21) & 0x1f;
17056 rt
= (ctx
->opcode
>> 16) & 0x1f;
17057 rd
= (ctx
->opcode
>> 11) & 0x1f;
17059 op1
= MASK_SPECIAL2(ctx
->opcode
);
17061 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17062 case OPC_MSUB
... OPC_MSUBU
:
17063 check_insn(ctx
, ISA_MIPS32
);
17064 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17067 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17070 case OPC_DIVU_G_2F
:
17071 case OPC_MULT_G_2F
:
17072 case OPC_MULTU_G_2F
:
17074 case OPC_MODU_G_2F
:
17075 check_insn(ctx
, INSN_LOONGSON2F
);
17076 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17080 check_insn(ctx
, ISA_MIPS32
);
17081 gen_cl(ctx
, op1
, rd
, rs
);
17084 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17085 gen_helper_do_semihosting(cpu_env
);
17087 /* XXX: not clear which exception should be raised
17088 * when in debug mode...
17090 check_insn(ctx
, ISA_MIPS32
);
17091 generate_exception_end(ctx
, EXCP_DBp
);
17094 #if defined(TARGET_MIPS64)
17097 check_insn(ctx
, ISA_MIPS64
);
17098 check_mips_64(ctx
);
17099 gen_cl(ctx
, op1
, rd
, rs
);
17101 case OPC_DMULT_G_2F
:
17102 case OPC_DMULTU_G_2F
:
17103 case OPC_DDIV_G_2F
:
17104 case OPC_DDIVU_G_2F
:
17105 case OPC_DMOD_G_2F
:
17106 case OPC_DMODU_G_2F
:
17107 check_insn(ctx
, INSN_LOONGSON2F
);
17108 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17111 default: /* Invalid */
17112 MIPS_INVAL("special2_legacy");
17113 generate_exception_end(ctx
, EXCP_RI
);
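/*
 * Behavioural sketch (assumption, illustration only; the translator uses
 * gen_muldiv() above): SPECIAL2 MADD accumulates the signed 64-bit product
 * of rs and rt into the HI:LO accumulator pair.
 */
static inline uint64_t madd_acc_sketch(uint64_t hi_lo, int32_t rs_val,
                                       int32_t rt_val)
{
    /* signed 32x32 -> 64 multiply, then wrap-around add into HI:LO */
    return hi_lo + (uint64_t)((int64_t)rs_val * (int64_t)rt_val);
}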
17118 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17120 int rs
, rt
, rd
, sa
;
17124 rs
= (ctx
->opcode
>> 21) & 0x1f;
17125 rt
= (ctx
->opcode
>> 16) & 0x1f;
17126 rd
= (ctx
->opcode
>> 11) & 0x1f;
17127 sa
= (ctx
->opcode
>> 6) & 0x1f;
17128 imm
= (int16_t)ctx
->opcode
>> 7;
17130 op1
= MASK_SPECIAL3(ctx
->opcode
);
17134 /* hint codes 24-31 are reserved and signal RI */
17135 generate_exception_end(ctx
, EXCP_RI
);
17137 /* Treat as NOP. */
17140 /* Treat as NOP. */
17143 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17146 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17151 /* Treat as NOP. */
17154 op2
= MASK_BSHFL(ctx
->opcode
);
17156 case OPC_ALIGN
... OPC_ALIGN_END
:
17157 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17160 gen_bitswap(ctx
, op2
, rd
, rt
);
17165 #if defined(TARGET_MIPS64)
17167 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17170 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17173 check_mips_64(ctx
);
17176 /* Treat as NOP. */
17179 op2
= MASK_DBSHFL(ctx
->opcode
);
17181 case OPC_DALIGN
... OPC_DALIGN_END
:
17182 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17185 gen_bitswap(ctx
, op2
, rd
, rt
);
17192 default: /* Invalid */
17193 MIPS_INVAL("special3_r6");
17194 generate_exception_end(ctx
, EXCP_RI
);
17199 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17204 rs
= (ctx
->opcode
>> 21) & 0x1f;
17205 rt
= (ctx
->opcode
>> 16) & 0x1f;
17206 rd
= (ctx
->opcode
>> 11) & 0x1f;
17208 op1
= MASK_SPECIAL3(ctx
->opcode
);
17210 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17211 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17212 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17213 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17214 * the same mask and op1. */
17215 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17216 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17219 case OPC_ADDUH_R_QB
:
17221 case OPC_ADDQH_R_PH
:
17223 case OPC_ADDQH_R_W
:
17225 case OPC_SUBUH_R_QB
:
17227 case OPC_SUBQH_R_PH
:
17229 case OPC_SUBQH_R_W
:
17230 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17235 case OPC_MULQ_RS_W
:
17236 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17239 MIPS_INVAL("MASK ADDUH.QB");
17240 generate_exception_end(ctx
, EXCP_RI
);
17243 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17244 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17246 generate_exception_end(ctx
, EXCP_RI
);
17250 op2
= MASK_LX(ctx
->opcode
);
17252 #if defined(TARGET_MIPS64)
17258 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17260 default: /* Invalid */
17261 MIPS_INVAL("MASK LX");
17262 generate_exception_end(ctx
, EXCP_RI
);
17266 case OPC_ABSQ_S_PH_DSP
:
17267 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17269 case OPC_ABSQ_S_QB
:
17270 case OPC_ABSQ_S_PH
:
17272 case OPC_PRECEQ_W_PHL
:
17273 case OPC_PRECEQ_W_PHR
:
17274 case OPC_PRECEQU_PH_QBL
:
17275 case OPC_PRECEQU_PH_QBR
:
17276 case OPC_PRECEQU_PH_QBLA
:
17277 case OPC_PRECEQU_PH_QBRA
:
17278 case OPC_PRECEU_PH_QBL
:
17279 case OPC_PRECEU_PH_QBR
:
17280 case OPC_PRECEU_PH_QBLA
:
17281 case OPC_PRECEU_PH_QBRA
:
17282 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17289 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17292 MIPS_INVAL("MASK ABSQ_S.PH");
17293 generate_exception_end(ctx
, EXCP_RI
);
17297 case OPC_ADDU_QB_DSP
:
17298 op2
= MASK_ADDU_QB(ctx
->opcode
);
17301 case OPC_ADDQ_S_PH
:
17304 case OPC_ADDU_S_QB
:
17306 case OPC_ADDU_S_PH
:
17308 case OPC_SUBQ_S_PH
:
17311 case OPC_SUBU_S_QB
:
17313 case OPC_SUBU_S_PH
:
17317 case OPC_RADDU_W_QB
:
17318 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17320 case OPC_MULEU_S_PH_QBL
:
17321 case OPC_MULEU_S_PH_QBR
:
17322 case OPC_MULQ_RS_PH
:
17323 case OPC_MULEQ_S_W_PHL
:
17324 case OPC_MULEQ_S_W_PHR
:
17325 case OPC_MULQ_S_PH
:
17326 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17328 default: /* Invalid */
17329 MIPS_INVAL("MASK ADDU.QB");
17330 generate_exception_end(ctx
, EXCP_RI
);
17335 case OPC_CMPU_EQ_QB_DSP
:
17336 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17338 case OPC_PRECR_SRA_PH_W
:
17339 case OPC_PRECR_SRA_R_PH_W
:
17340 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17342 case OPC_PRECR_QB_PH
:
17343 case OPC_PRECRQ_QB_PH
:
17344 case OPC_PRECRQ_PH_W
:
17345 case OPC_PRECRQ_RS_PH_W
:
17346 case OPC_PRECRQU_S_QB_PH
:
17347 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17349 case OPC_CMPU_EQ_QB
:
17350 case OPC_CMPU_LT_QB
:
17351 case OPC_CMPU_LE_QB
:
17352 case OPC_CMP_EQ_PH
:
17353 case OPC_CMP_LT_PH
:
17354 case OPC_CMP_LE_PH
:
17355 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17357 case OPC_CMPGU_EQ_QB
:
17358 case OPC_CMPGU_LT_QB
:
17359 case OPC_CMPGU_LE_QB
:
17360 case OPC_CMPGDU_EQ_QB
:
17361 case OPC_CMPGDU_LT_QB
:
17362 case OPC_CMPGDU_LE_QB
:
17365 case OPC_PACKRL_PH
:
17366 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17368 default: /* Invalid */
17369 MIPS_INVAL("MASK CMPU.EQ.QB");
17370 generate_exception_end(ctx
, EXCP_RI
);
17374 case OPC_SHLL_QB_DSP
:
17375 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17377 case OPC_DPA_W_PH_DSP
:
17378 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17380 case OPC_DPAU_H_QBL
:
17381 case OPC_DPAU_H_QBR
:
17382 case OPC_DPSU_H_QBL
:
17383 case OPC_DPSU_H_QBR
:
17385 case OPC_DPAX_W_PH
:
17386 case OPC_DPAQ_S_W_PH
:
17387 case OPC_DPAQX_S_W_PH
:
17388 case OPC_DPAQX_SA_W_PH
:
17390 case OPC_DPSX_W_PH
:
17391 case OPC_DPSQ_S_W_PH
:
17392 case OPC_DPSQX_S_W_PH
:
17393 case OPC_DPSQX_SA_W_PH
:
17394 case OPC_MULSAQ_S_W_PH
:
17395 case OPC_DPAQ_SA_L_W
:
17396 case OPC_DPSQ_SA_L_W
:
17397 case OPC_MAQ_S_W_PHL
:
17398 case OPC_MAQ_S_W_PHR
:
17399 case OPC_MAQ_SA_W_PHL
:
17400 case OPC_MAQ_SA_W_PHR
:
17401 case OPC_MULSA_W_PH
:
17402 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17404 default: /* Invalid */
17405 MIPS_INVAL("MASK DPAW.PH");
17406 generate_exception_end(ctx
, EXCP_RI
);
17411 op2
= MASK_INSV(ctx
->opcode
);
17422 t0
= tcg_temp_new();
17423 t1
= tcg_temp_new();
17425 gen_load_gpr(t0
, rt
);
17426 gen_load_gpr(t1
, rs
);
17428 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17434 default: /* Invalid */
17435 MIPS_INVAL("MASK INSV");
17436 generate_exception_end(ctx
, EXCP_RI
);
17440 case OPC_APPEND_DSP
:
17441 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17443 case OPC_EXTR_W_DSP
:
17444 op2
= MASK_EXTR_W(ctx
->opcode
);
17448 case OPC_EXTR_RS_W
:
17450 case OPC_EXTRV_S_H
:
17452 case OPC_EXTRV_R_W
:
17453 case OPC_EXTRV_RS_W
:
17458 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17461 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17467 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17469 default: /* Invalid */
17470 MIPS_INVAL("MASK EXTR.W");
17471 generate_exception_end(ctx
, EXCP_RI
);
17475 #if defined(TARGET_MIPS64)
17476 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17477 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17478 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17479 check_insn(ctx
, INSN_LOONGSON2E
);
17480 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17482 case OPC_ABSQ_S_QH_DSP
:
17483 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17485 case OPC_PRECEQ_L_PWL
:
17486 case OPC_PRECEQ_L_PWR
:
17487 case OPC_PRECEQ_PW_QHL
:
17488 case OPC_PRECEQ_PW_QHR
:
17489 case OPC_PRECEQ_PW_QHLA
:
17490 case OPC_PRECEQ_PW_QHRA
:
17491 case OPC_PRECEQU_QH_OBL
:
17492 case OPC_PRECEQU_QH_OBR
:
17493 case OPC_PRECEQU_QH_OBLA
:
17494 case OPC_PRECEQU_QH_OBRA
:
17495 case OPC_PRECEU_QH_OBL
:
17496 case OPC_PRECEU_QH_OBR
:
17497 case OPC_PRECEU_QH_OBLA
:
17498 case OPC_PRECEU_QH_OBRA
:
17499 case OPC_ABSQ_S_OB
:
17500 case OPC_ABSQ_S_PW
:
17501 case OPC_ABSQ_S_QH
:
17502 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17510 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17512 default: /* Invalid */
17513 MIPS_INVAL("MASK ABSQ_S.QH");
17514 generate_exception_end(ctx
, EXCP_RI
);
17518 case OPC_ADDU_OB_DSP
:
17519 op2
= MASK_ADDU_OB(ctx
->opcode
);
17521 case OPC_RADDU_L_OB
:
17523 case OPC_SUBQ_S_PW
:
17525 case OPC_SUBQ_S_QH
:
17527 case OPC_SUBU_S_OB
:
17529 case OPC_SUBU_S_QH
:
17531 case OPC_SUBUH_R_OB
:
17533 case OPC_ADDQ_S_PW
:
17535 case OPC_ADDQ_S_QH
:
17537 case OPC_ADDU_S_OB
:
17539 case OPC_ADDU_S_QH
:
17541 case OPC_ADDUH_R_OB
:
17542 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17544 case OPC_MULEQ_S_PW_QHL
:
17545 case OPC_MULEQ_S_PW_QHR
:
17546 case OPC_MULEU_S_QH_OBL
:
17547 case OPC_MULEU_S_QH_OBR
:
17548 case OPC_MULQ_RS_QH
:
17549 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17551 default: /* Invalid */
17552 MIPS_INVAL("MASK ADDU.OB");
17553 generate_exception_end(ctx
, EXCP_RI
);
17557 case OPC_CMPU_EQ_OB_DSP
:
17558 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17560 case OPC_PRECR_SRA_QH_PW
:
17561 case OPC_PRECR_SRA_R_QH_PW
:
17562 /* Return value is rt. */
17563 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17565 case OPC_PRECR_OB_QH
:
17566 case OPC_PRECRQ_OB_QH
:
17567 case OPC_PRECRQ_PW_L
:
17568 case OPC_PRECRQ_QH_PW
:
17569 case OPC_PRECRQ_RS_QH_PW
:
17570 case OPC_PRECRQU_S_OB_QH
:
17571 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17573 case OPC_CMPU_EQ_OB
:
17574 case OPC_CMPU_LT_OB
:
17575 case OPC_CMPU_LE_OB
:
17576 case OPC_CMP_EQ_QH
:
17577 case OPC_CMP_LT_QH
:
17578 case OPC_CMP_LE_QH
:
17579 case OPC_CMP_EQ_PW
:
17580 case OPC_CMP_LT_PW
:
17581 case OPC_CMP_LE_PW
:
17582 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17584 case OPC_CMPGDU_EQ_OB
:
17585 case OPC_CMPGDU_LT_OB
:
17586 case OPC_CMPGDU_LE_OB
:
17587 case OPC_CMPGU_EQ_OB
:
17588 case OPC_CMPGU_LT_OB
:
17589 case OPC_CMPGU_LE_OB
:
17590 case OPC_PACKRL_PW
:
17594 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17596 default: /* Invalid */
17597 MIPS_INVAL("MASK CMPU_EQ.OB");
17598 generate_exception_end(ctx
, EXCP_RI
);
17602 case OPC_DAPPEND_DSP
:
17603 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17605 case OPC_DEXTR_W_DSP
:
17606 op2
= MASK_DEXTR_W(ctx
->opcode
);
17613 case OPC_DEXTR_R_L
:
17614 case OPC_DEXTR_RS_L
:
17616 case OPC_DEXTR_R_W
:
17617 case OPC_DEXTR_RS_W
:
17618 case OPC_DEXTR_S_H
:
17620 case OPC_DEXTRV_R_L
:
17621 case OPC_DEXTRV_RS_L
:
17622 case OPC_DEXTRV_S_H
:
17624 case OPC_DEXTRV_R_W
:
17625 case OPC_DEXTRV_RS_W
:
17626 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17631 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17633 default: /* Invalid */
17634 MIPS_INVAL("MASK EXTR.W");
17635 generate_exception_end(ctx
, EXCP_RI
);
17639 case OPC_DPAQ_W_QH_DSP
:
17640 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17642 case OPC_DPAU_H_OBL
:
17643 case OPC_DPAU_H_OBR
:
17644 case OPC_DPSU_H_OBL
:
17645 case OPC_DPSU_H_OBR
:
17647 case OPC_DPAQ_S_W_QH
:
17649 case OPC_DPSQ_S_W_QH
:
17650 case OPC_MULSAQ_S_W_QH
:
17651 case OPC_DPAQ_SA_L_PW
:
17652 case OPC_DPSQ_SA_L_PW
:
17653 case OPC_MULSAQ_S_L_PW
:
17654 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17656 case OPC_MAQ_S_W_QHLL
:
17657 case OPC_MAQ_S_W_QHLR
:
17658 case OPC_MAQ_S_W_QHRL
:
17659 case OPC_MAQ_S_W_QHRR
:
17660 case OPC_MAQ_SA_W_QHLL
:
17661 case OPC_MAQ_SA_W_QHLR
:
17662 case OPC_MAQ_SA_W_QHRL
:
17663 case OPC_MAQ_SA_W_QHRR
:
17664 case OPC_MAQ_S_L_PWL
:
17665 case OPC_MAQ_S_L_PWR
:
17670 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17672 default: /* Invalid */
17673 MIPS_INVAL("MASK DPAQ.W.QH");
17674 generate_exception_end(ctx
, EXCP_RI
);
17678 case OPC_DINSV_DSP
:
17679 op2
= MASK_INSV(ctx
->opcode
);
17690 t0
= tcg_temp_new();
17691 t1
= tcg_temp_new();
17693 gen_load_gpr(t0
, rt
);
17694 gen_load_gpr(t1
, rs
);
17696 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17702 default: /* Invalid */
17703 MIPS_INVAL("MASK DINSV");
17704 generate_exception_end(ctx
, EXCP_RI
);
17708 case OPC_SHLL_OB_DSP
:
17709 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17712 default: /* Invalid */
17713 MIPS_INVAL("special3_legacy");
17714 generate_exception_end(ctx
, EXCP_RI
);
17719 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17721 int rs
, rt
, rd
, sa
;
17724 rs
= (ctx
->opcode
>> 21) & 0x1f;
17725 rt
= (ctx
->opcode
>> 16) & 0x1f;
17726 rd
= (ctx
->opcode
>> 11) & 0x1f;
17727 sa
= (ctx
->opcode
>> 6) & 0x1f;
17729 op1
= MASK_SPECIAL3(ctx
->opcode
);
17733 check_insn(ctx
, ISA_MIPS32R2
);
17734 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17737 op2
= MASK_BSHFL(ctx
->opcode
);
17739 case OPC_ALIGN
... OPC_ALIGN_END
:
17741 check_insn(ctx
, ISA_MIPS32R6
);
17742 decode_opc_special3_r6(env
, ctx
);
17745 check_insn(ctx
, ISA_MIPS32R2
);
17746 gen_bshfl(ctx
, op2
, rt
, rd
);
17750 #if defined(TARGET_MIPS64)
17751 case OPC_DEXTM
... OPC_DEXT
:
17752 case OPC_DINSM
... OPC_DINS
:
17753 check_insn(ctx
, ISA_MIPS64R2
);
17754 check_mips_64(ctx
);
17755 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17758 op2
= MASK_DBSHFL(ctx
->opcode
);
17760 case OPC_DALIGN
... OPC_DALIGN_END
:
17762 check_insn(ctx
, ISA_MIPS32R6
);
17763 decode_opc_special3_r6(env
, ctx
);
17766 check_insn(ctx
, ISA_MIPS64R2
);
17767 check_mips_64(ctx
);
17768 op2
= MASK_DBSHFL(ctx
->opcode
);
17769 gen_bshfl(ctx
, op2
, rt
, rd
);
17775 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17778 check_insn(ctx
, ASE_MT
);
17780 TCGv t0
= tcg_temp_new();
17781 TCGv t1
= tcg_temp_new();
17783 gen_load_gpr(t0
, rt
);
17784 gen_load_gpr(t1
, rs
);
17785 gen_helper_fork(t0
, t1
);
17791 check_insn(ctx
, ASE_MT
);
17793 TCGv t0
= tcg_temp_new();
17795 gen_load_gpr(t0
, rs
);
17796 gen_helper_yield(t0
, cpu_env
, t0
);
17797 gen_store_gpr(t0
, rd
);
17802 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17803 decode_opc_special3_r6(env
, ctx
);
17805 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */

static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
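/*
 * Illustrative sketch (assumption: standalone helper, byte case only): the
 * constant pairs above implement the classic "is any lane zero?" trick on a
 * 64-bit half of the vector.  A lane equal to zero makes the subtraction
 * borrow into its top bit while ~x keeps that bit set, so the masked result
 * is non-zero exactly when some byte lane is zero.
 */
static inline bool msa_any_byte_zero_sketch(uint64_t x)
{
    const uint64_t ones = 0x0101010101010101ULL;  /* eval_zero_or_big */
    const uint64_t high = 0x8080808080808080ULL;  /* eval_big */

    return ((x - ones) & ~x & high) != 0;
}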
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        gen_check_zero_element(bcond, df, wt);
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
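/*
 * Sketch of the branch-target arithmetic used above (assumption: helper is
 * illustrative only).  The 16-bit immediate is a signed count of words
 * relative to the instruction that follows the branch, hence the "<< 2"
 * and the "+ 4".
 */
static inline target_ulong msa_branch_target_sketch(target_ulong pc,
                                                    int16_t offset)
{
    return pc + ((target_long)offset << 2) + 4;
}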
static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
        {
            uint8_t df = (ctx->opcode >> 24) & 0x3;
            if (df == DF_DOUBLE) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                TCGv_i32 tdf = tcg_const_i32(df);
                gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
                tcg_temp_free_i32(tdf);
            }
        }
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
}
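/*
 * Behavioural sketch (assumption; the architectural work is done by the MSA
 * helpers dispatched above): ANDI.B applies the 8-bit immediate to every
 * byte lane of the 128-bit source register.
 */
static inline void msa_andi_b_sketch(uint8_t wd[16], const uint8_t ws[16],
                                     uint8_t i8)
{
    int i;

    for (i = 0; i < 16; i++) {
        wd[i] = ws[i] & i8;
    }
}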
17975 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17977 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17978 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17979 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17980 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17981 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17982 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17984 TCGv_i32 tdf
= tcg_const_i32(df
);
17985 TCGv_i32 twd
= tcg_const_i32(wd
);
17986 TCGv_i32 tws
= tcg_const_i32(ws
);
17987 TCGv_i32 timm
= tcg_temp_new_i32();
17988 tcg_gen_movi_i32(timm
, u5
);
17990 switch (MASK_MSA_I5(ctx
->opcode
)) {
17992 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17995 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17997 case OPC_MAXI_S_df
:
17998 tcg_gen_movi_i32(timm
, s5
);
17999 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18001 case OPC_MAXI_U_df
:
18002 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18004 case OPC_MINI_S_df
:
18005 tcg_gen_movi_i32(timm
, s5
);
18006 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18008 case OPC_MINI_U_df
:
18009 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18012 tcg_gen_movi_i32(timm
, s5
);
18013 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18015 case OPC_CLTI_S_df
:
18016 tcg_gen_movi_i32(timm
, s5
);
18017 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18019 case OPC_CLTI_U_df
:
18020 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18022 case OPC_CLEI_S_df
:
18023 tcg_gen_movi_i32(timm
, s5
);
18024 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18026 case OPC_CLEI_U_df
:
18027 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18031 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18032 tcg_gen_movi_i32(timm
, s10
);
18033 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18037 MIPS_INVAL("MSA instruction");
18038 generate_exception_end(ctx
, EXCP_RI
);
18042 tcg_temp_free_i32(tdf
);
18043 tcg_temp_free_i32(twd
);
18044 tcg_temp_free_i32(tws
);
18045 tcg_temp_free_i32(timm
);
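/*
 * Illustrative sketch (assumption, not used above): the I5 format carries a
 * 5-bit immediate that the unsigned operations above consume zero-extended
 * (u5) and the signed ones sign-extended (s5, range -16..15), e.g. one
 * 32-bit lane of MAXI_S.W:
 */
static inline int32_t msa_maxi_s_w_lane_sketch(int32_t lane, int32_t s5)
{
    return lane > s5 ? lane : s5;
}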
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 tdf;
    TCGv_i32 tm;
    TCGv_i32 twd;
    TCGv_i32 tws;

    if ((dfm & 0x40) == 0x00) {
        m = dfm & 0x3f;
        df = DF_DOUBLE;
    } else if ((dfm & 0x60) == 0x40) {
        m = dfm & 0x1f;
        df = DF_WORD;
    } else if ((dfm & 0x70) == 0x60) {
        m = dfm & 0x0f;
        df = DF_HALF;
    } else if ((dfm & 0x78) == 0x70) {
        m = dfm & 0x7;
        df = DF_BYTE;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    tdf = tcg_const_i32(df);
    tm  = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    }

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
}
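/*
 * Behavioural sketch (assumption; the real work happens in the msa_*_df
 * helpers called above): once df/m have been decoded, SLLI.W simply shifts
 * every 32-bit lane left by the immediate bit count m.
 */
static inline void msa_slli_w_sketch(uint32_t wd[4], const uint32_t ws[4],
                                     unsigned m)
{
    int i;

    for (i = 0; i < 4; i++) {
        wd[i] = ws[i] << (m & 31);
    }
}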
18132 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18134 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18135 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18136 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18137 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18138 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18140 TCGv_i32 tdf
= tcg_const_i32(df
);
18141 TCGv_i32 twd
= tcg_const_i32(wd
);
18142 TCGv_i32 tws
= tcg_const_i32(ws
);
18143 TCGv_i32 twt
= tcg_const_i32(wt
);
18145 switch (MASK_MSA_3R(ctx
->opcode
)) {
18147 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18150 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18153 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18156 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18158 case OPC_SUBS_S_df
:
18159 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18162 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18165 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18168 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18171 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18174 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18176 case OPC_ADDS_A_df
:
18177 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18179 case OPC_SUBS_U_df
:
18180 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18183 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18186 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18189 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18192 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18195 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18198 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18200 case OPC_ADDS_S_df
:
18201 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18203 case OPC_SUBSUS_U_df
:
18204 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18207 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18210 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18213 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18216 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18219 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18222 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18224 case OPC_ADDS_U_df
:
18225 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18227 case OPC_SUBSUU_S_df
:
18228 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18231 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18234 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18237 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18240 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18243 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18245 case OPC_ASUB_S_df
:
18246 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18249 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18252 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18255 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18258 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18261 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18264 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18266 case OPC_ASUB_U_df
:
18267 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18270 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18273 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18276 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18279 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18281 case OPC_AVER_S_df
:
18282 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18285 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18288 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18291 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18294 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18296 case OPC_AVER_U_df
:
18297 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18300 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18303 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18306 case OPC_DOTP_S_df
:
18307 case OPC_DOTP_U_df
:
18308 case OPC_DPADD_S_df
:
18309 case OPC_DPADD_U_df
:
18310 case OPC_DPSUB_S_df
:
18311 case OPC_HADD_S_df
:
18312 case OPC_DPSUB_U_df
:
18313 case OPC_HADD_U_df
:
18314 case OPC_HSUB_S_df
:
18315 case OPC_HSUB_U_df
:
18316 if (df
== DF_BYTE
) {
18317 generate_exception_end(ctx
, EXCP_RI
);
18320 switch (MASK_MSA_3R(ctx
->opcode
)) {
18321 case OPC_DOTP_S_df
:
18322 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18324 case OPC_DOTP_U_df
:
18325 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18327 case OPC_DPADD_S_df
:
18328 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18330 case OPC_DPADD_U_df
:
18331 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18333 case OPC_DPSUB_S_df
:
18334 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18336 case OPC_HADD_S_df
:
18337 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18339 case OPC_DPSUB_U_df
:
18340 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18342 case OPC_HADD_U_df
:
18343 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18345 case OPC_HSUB_S_df
:
18346 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18348 case OPC_HSUB_U_df
:
18349 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18354 MIPS_INVAL("MSA instruction");
18355 generate_exception_end(ctx
, EXCP_RI
);
18358 tcg_temp_free_i32(twd
);
18359 tcg_temp_free_i32(tws
);
18360 tcg_temp_free_i32(twt
);
18361 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
18396 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18399 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18400 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18401 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18403 TCGv_i32 tws
= tcg_const_i32(ws
);
18404 TCGv_i32 twd
= tcg_const_i32(wd
);
18405 TCGv_i32 tn
= tcg_const_i32(n
);
18406 TCGv_i32 tdf
= tcg_const_i32(df
);
18408 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18410 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18412 case OPC_SPLATI_df
:
18413 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18416 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18418 case OPC_COPY_S_df
:
18419 case OPC_COPY_U_df
:
18420 case OPC_INSERT_df
:
18421 #if !defined(TARGET_MIPS64)
18422 /* Double format valid only for MIPS64 */
18423 if (df
== DF_DOUBLE
) {
18424 generate_exception_end(ctx
, EXCP_RI
);
18428 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18429 case OPC_COPY_S_df
:
18430 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18432 case OPC_COPY_U_df
:
18433 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18435 case OPC_INSERT_df
:
18436 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18441 MIPS_INVAL("MSA instruction");
18442 generate_exception_end(ctx
, EXCP_RI
);
18444 tcg_temp_free_i32(twd
);
18445 tcg_temp_free_i32(tws
);
18446 tcg_temp_free_i32(tn
);
18447 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
18479 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18481 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18482 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18483 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18484 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18485 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18487 TCGv_i32 twd
= tcg_const_i32(wd
);
18488 TCGv_i32 tws
= tcg_const_i32(ws
);
18489 TCGv_i32 twt
= tcg_const_i32(wt
);
18490 TCGv_i32 tdf
= tcg_temp_new_i32();
18492 /* adjust df value for floating-point instruction */
18493 tcg_gen_movi_i32(tdf
, df
+ 2);
18495 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18497 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18500 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18503 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18506 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18509 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18512 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18515 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18518 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18521 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18524 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18527 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18530 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18533 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18536 tcg_gen_movi_i32(tdf
, df
+ 1);
18537 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18540 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18543 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18545 case OPC_MADD_Q_df
:
18546 tcg_gen_movi_i32(tdf
, df
+ 1);
18547 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18550 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18552 case OPC_MSUB_Q_df
:
18553 tcg_gen_movi_i32(tdf
, df
+ 1);
18554 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18557 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18560 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18563 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18566 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18569 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18572 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18575 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18578 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18581 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18584 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18587 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18590 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18593 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18595 case OPC_MULR_Q_df
:
18596 tcg_gen_movi_i32(tdf
, df
+ 1);
18597 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18600 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18602 case OPC_FMIN_A_df
:
18603 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18605 case OPC_MADDR_Q_df
:
18606 tcg_gen_movi_i32(tdf
, df
+ 1);
18607 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18610 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18613 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18615 case OPC_MSUBR_Q_df
:
18616 tcg_gen_movi_i32(tdf
, df
+ 1);
18617 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18620 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18622 case OPC_FMAX_A_df
:
18623 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18626 MIPS_INVAL("MSA instruction");
18627 generate_exception_end(ctx
, EXCP_RI
);
18631 tcg_temp_free_i32(twd
);
18632 tcg_temp_free_i32(tws
);
18633 tcg_temp_free_i32(twt
);
18634 tcg_temp_free_i32(tdf
);
18637 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18639 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18640 (op & (0x7 << 18)))
18641 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18642 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18643 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18644 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18645 TCGv_i32 twd
= tcg_const_i32(wd
);
18646 TCGv_i32 tws
= tcg_const_i32(ws
);
18647 TCGv_i32 twt
= tcg_const_i32(wt
);
18648 TCGv_i32 tdf
= tcg_const_i32(df
);
18650 switch (MASK_MSA_2R(ctx
->opcode
)) {
18652 #if !defined(TARGET_MIPS64)
18653 /* Double format valid only for MIPS64 */
18654 if (df
== DF_DOUBLE
) {
18655 generate_exception_end(ctx
, EXCP_RI
);
18659 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18662 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18665 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18668 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18671 MIPS_INVAL("MSA instruction");
18672 generate_exception_end(ctx
, EXCP_RI
);
18676 tcg_temp_free_i32(twd
);
18677 tcg_temp_free_i32(tws
);
18678 tcg_temp_free_i32(twt
);
18679 tcg_temp_free_i32(tdf
);
18682 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18684 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18685 (op & (0xf << 17)))
18686 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18687 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18688 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18689 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18690 TCGv_i32 twd
= tcg_const_i32(wd
);
18691 TCGv_i32 tws
= tcg_const_i32(ws
);
18692 TCGv_i32 twt
= tcg_const_i32(wt
);
18693 /* adjust df value for floating-point instruction */
18694 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18696 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18697 case OPC_FCLASS_df
:
18698 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18700 case OPC_FTRUNC_S_df
:
18701 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18703 case OPC_FTRUNC_U_df
:
18704 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18707 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18709 case OPC_FRSQRT_df
:
18710 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18713 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18716 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18719 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18721 case OPC_FEXUPL_df
:
18722 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18724 case OPC_FEXUPR_df
:
18725 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18728 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18731 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18733 case OPC_FTINT_S_df
:
18734 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18736 case OPC_FTINT_U_df
:
18737 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18739 case OPC_FFINT_S_df
:
18740 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18742 case OPC_FFINT_U_df
:
18743 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18747 tcg_temp_free_i32(twd
);
18748 tcg_temp_free_i32(tws
);
18749 tcg_temp_free_i32(twt
);
18750 tcg_temp_free_i32(tdf
);
18753 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18755 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18756 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18757 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18758 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18759 TCGv_i32 twd
= tcg_const_i32(wd
);
18760 TCGv_i32 tws
= tcg_const_i32(ws
);
18761 TCGv_i32 twt
= tcg_const_i32(wt
);
18763 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18765 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18768 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18771 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18774 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18777 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18780 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18783 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18786 MIPS_INVAL("MSA instruction");
18787 generate_exception_end(ctx
, EXCP_RI
);
18791 tcg_temp_free_i32(twd
);
18792 tcg_temp_free_i32(tws
);
18793 tcg_temp_free_i32(twt
);
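/*
 * Behavioural sketch (assumption, illustration only): BSEL.V, dispatched to
 * gen_helper_msa_bsel_v() above, chooses each destination bit from ws where
 * the current wd bit is 0 and from wt where it is 1.
 */
static inline uint64_t msa_bsel_v_sketch(uint64_t wd, uint64_t ws, uint64_t wt)
{
    return (ws & ~wd) | (wt & wd);
}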
18796 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18798 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18806 gen_msa_vec_v(env
, ctx
);
18809 gen_msa_2r(env
, ctx
);
18812 gen_msa_2rf(env
, ctx
);
18815 MIPS_INVAL("MSA instruction");
18816 generate_exception_end(ctx
, EXCP_RI
);
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;

    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                gen_helper_msa_st_d(cpu_env, twd, taddr);
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    }
}
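/*
 * Sketch of the LD.df/ST.df address computation above (assumption:
 * standalone helper).  The 10-bit signed offset is scaled by the element
 * size, so the reach is always +-512 elements of the selected format.
 */
static inline target_ulong msa_ldst_addr_sketch(target_ulong base,
                                                int32_t s10, unsigned df)
{
    return base + ((target_long)s10 << df);
}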
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
, ctx
);
18955 decode_opc_special2_legacy(env
, ctx
);
18958 decode_opc_special3(env
, ctx
);
18961 op1
= MASK_REGIMM(ctx
->opcode
);
18963 case OPC_BLTZL
: /* REGIMM branches */
18967 check_insn(ctx
, ISA_MIPS2
);
18968 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18972 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18976 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18978 /* OPC_NAL, OPC_BAL */
18979 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18981 generate_exception_end(ctx
, EXCP_RI
);
18984 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18987 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18989 check_insn(ctx
, ISA_MIPS2
);
18990 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18991 gen_trap(ctx
, op1
, rs
, -1, imm
);
18994 check_insn(ctx
, ISA_MIPS32R6
);
18995 generate_exception_end(ctx
, EXCP_RI
);
18998 check_insn(ctx
, ISA_MIPS32R2
);
18999 /* Break the TB to be able to sync copied instructions
19001 ctx
->bstate
= BS_STOP
;
19003 case OPC_BPOSGE32
: /* MIPS DSP branch */
19004 #if defined(TARGET_MIPS64)
19008 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19010 #if defined(TARGET_MIPS64)
19012 check_insn(ctx
, ISA_MIPS32R6
);
19013 check_mips_64(ctx
);
19015 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19019 check_insn(ctx
, ISA_MIPS32R6
);
19020 check_mips_64(ctx
);
19022 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19026 default: /* Invalid */
19027 MIPS_INVAL("regimm");
19028 generate_exception_end(ctx
, EXCP_RI
);
19033 check_cp0_enabled(ctx
);
19034 op1
= MASK_CP0(ctx
->opcode
);
19042 #if defined(TARGET_MIPS64)
19046 #ifndef CONFIG_USER_ONLY
19047 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19048 #endif /* !CONFIG_USER_ONLY */
19050 case OPC_C0_FIRST
... OPC_C0_LAST
:
19051 #ifndef CONFIG_USER_ONLY
19052 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19053 #endif /* !CONFIG_USER_ONLY */
19056 #ifndef CONFIG_USER_ONLY
19059 TCGv t0
= tcg_temp_new();
19061 op2
= MASK_MFMC0(ctx
->opcode
);
19064 check_insn(ctx
, ASE_MT
);
19065 gen_helper_dmt(t0
);
19066 gen_store_gpr(t0
, rt
);
19069 check_insn(ctx
, ASE_MT
);
19070 gen_helper_emt(t0
);
19071 gen_store_gpr(t0
, rt
);
19074 check_insn(ctx
, ASE_MT
);
19075 gen_helper_dvpe(t0
, cpu_env
);
19076 gen_store_gpr(t0
, rt
);
19079 check_insn(ctx
, ASE_MT
);
19080 gen_helper_evpe(t0
, cpu_env
);
19081 gen_store_gpr(t0
, rt
);
19084 check_insn(ctx
, ISA_MIPS32R2
);
19085 save_cpu_state(ctx
, 1);
19086 gen_helper_di(t0
, cpu_env
);
19087 gen_store_gpr(t0
, rt
);
19088 /* Stop translation as we may have switched
19089 the execution mode. */
19090 ctx
->bstate
= BS_STOP
;
19093 check_insn(ctx
, ISA_MIPS32R2
);
19094 save_cpu_state(ctx
, 1);
19095 gen_helper_ei(t0
, cpu_env
);
19096 gen_store_gpr(t0
, rt
);
19097 /* Stop translation as we may have switched
19098 the execution mode. */
19099 ctx
->bstate
= BS_STOP
;
19101 default: /* Invalid */
19102 MIPS_INVAL("mfmc0");
19103 generate_exception_end(ctx
, EXCP_RI
);
19108 #endif /* !CONFIG_USER_ONLY */
19111 check_insn(ctx
, ISA_MIPS32R2
);
19112 gen_load_srsgpr(rt
, rd
);
19115 check_insn(ctx
, ISA_MIPS32R2
);
19116 gen_store_srsgpr(rt
, rd
);
19120 generate_exception_end(ctx
, EXCP_RI
);
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
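        /* All of the conditional and compact branch forms above carry a
         * 16-bit signed immediate counted in instruction words, so it is
         * shifted left by two before being passed to gen_compute_branch()
         * or gen_compute_compact_branch() as a byte displacement. */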
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);
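        /* For the MIPS32R6 CMP.condn.fmt family above, the comparison
         * condition lives in the low five bits of the opcode, so
         * gen_r6_cmp_s()/gen_r6_cmp_d() are passed ctx->opcode & 0x1f
         * directly; pre-R6 cores instead fall back to gen_farith() with
         * the legacy C.cond.fmt encoding. */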
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LDC2, OPC_SDC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
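        /* BC/BALC encode a 26-bit instruction-word offset and BEQZC/BNEZC a
         * 21-bit one; shifting the opcode left by two and sign-extracting
         * 28 or 23 bits yields the byte displacement in a single step. */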
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
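        /* The CP3 (COP1X) indexed FP load/store and three-operand cases above
         * are only decoded when an FPU is configured (Config1.FP); without
         * one, a coprocessor-unusable exception for coprocessor 1 is raised
         * instead. */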
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start;
    target_ulong next_page_start;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
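    /* Main translation loop: fetch one instruction (standard 32-bit,
     * microMIPS or MIPS16), decode it, handle any delay/forbidden slot,
     * and continue until the TB must end -- bstate leaves BS_NONE, the
     * next page boundary is reached, the TCG op buffer fills up, or
     * max_insns instructions have been translated. */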
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be properly
               cleared -- thus we increment the PC here so that the logic
               setting tb->size below does the right thing. */
            goto done_generating;
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);
        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* Force to generate branch as there is neither delay nor
                   forbidden slot. */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);
        ctx.pc += insn_bytes;
        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
        if (ctx.pc >= next_page_start) {
        if (tcg_op_buf_full()) {
        if (num_insns >= max_insns)
    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx.bstate) {
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);
    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
#define printfpr(fp)                                                    \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu: %13g\n",                     \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d,                    \
                    (double)(fp)->fd,                                   \
                    (double)(fp)->fs[FP_ENDIAN_IDX],                    \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]);                  \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu:%13g\n",                      \
                    tmp.w[FP_ENDIAN_IDX], tmp.d,                        \
                    (double)tmp.fs[FP_ENDIAN_IDX],                      \
                    (double)tmp.fs[!FP_ENDIAN_IDX]);                    \
    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");
    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
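    /* Every TCG global registered above is backed by a CPUMIPSState field
     * (via offsetof), so generated code reads and writes the architectural
     * state in place; cpu_gpr[0] is deliberately left unallocated because
     * $zero is hard-wired to 0 and writes to it are discarded. */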
#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
    const mips_def_t *def;
    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;
#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);
    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;
#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= 0x80000000;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    cpu_mips_store_count(env, 1);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;
    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];