/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
44 /* indirect opcode tables */
45 OPC_SPECIAL
= (0x00 << 26),
46 OPC_REGIMM
= (0x01 << 26),
47 OPC_CP0
= (0x10 << 26),
48 OPC_CP1
= (0x11 << 26),
49 OPC_CP2
= (0x12 << 26),
50 OPC_CP3
= (0x13 << 26),
51 OPC_SPECIAL2
= (0x1C << 26),
52 OPC_SPECIAL3
= (0x1F << 26),
53 /* arithmetic with immediate */
54 OPC_ADDI
= (0x08 << 26),
55 OPC_ADDIU
= (0x09 << 26),
56 OPC_SLTI
= (0x0A << 26),
57 OPC_SLTIU
= (0x0B << 26),
58 /* logic with immediate */
59 OPC_ANDI
= (0x0C << 26),
60 OPC_ORI
= (0x0D << 26),
61 OPC_XORI
= (0x0E << 26),
62 OPC_LUI
= (0x0F << 26),
63 /* arithmetic with immediate */
64 OPC_DADDI
= (0x18 << 26),
65 OPC_DADDIU
= (0x19 << 26),
66 /* Jump and branches */
68 OPC_JAL
= (0x03 << 26),
69 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
70 OPC_BEQL
= (0x14 << 26),
71 OPC_BNE
= (0x05 << 26),
72 OPC_BNEL
= (0x15 << 26),
73 OPC_BLEZ
= (0x06 << 26),
74 OPC_BLEZL
= (0x16 << 26),
75 OPC_BGTZ
= (0x07 << 26),
76 OPC_BGTZL
= (0x17 << 26),
77 OPC_JALX
= (0x1D << 26),
78 OPC_DAUI
= (0x1D << 26),
80 OPC_LDL
= (0x1A << 26),
81 OPC_LDR
= (0x1B << 26),
82 OPC_LB
= (0x20 << 26),
83 OPC_LH
= (0x21 << 26),
84 OPC_LWL
= (0x22 << 26),
85 OPC_LW
= (0x23 << 26),
86 OPC_LWPC
= OPC_LW
| 0x5,
87 OPC_LBU
= (0x24 << 26),
88 OPC_LHU
= (0x25 << 26),
89 OPC_LWR
= (0x26 << 26),
90 OPC_LWU
= (0x27 << 26),
91 OPC_SB
= (0x28 << 26),
92 OPC_SH
= (0x29 << 26),
93 OPC_SWL
= (0x2A << 26),
94 OPC_SW
= (0x2B << 26),
95 OPC_SDL
= (0x2C << 26),
96 OPC_SDR
= (0x2D << 26),
97 OPC_SWR
= (0x2E << 26),
98 OPC_LL
= (0x30 << 26),
99 OPC_LLD
= (0x34 << 26),
100 OPC_LD
= (0x37 << 26),
101 OPC_LDPC
= OPC_LD
| 0x5,
102 OPC_SC
= (0x38 << 26),
103 OPC_SCD
= (0x3C << 26),
104 OPC_SD
= (0x3F << 26),
105 /* Floating point load/store */
106 OPC_LWC1
= (0x31 << 26),
107 OPC_LWC2
= (0x32 << 26),
108 OPC_LDC1
= (0x35 << 26),
109 OPC_LDC2
= (0x36 << 26),
110 OPC_SWC1
= (0x39 << 26),
111 OPC_SWC2
= (0x3A << 26),
112 OPC_SDC1
= (0x3D << 26),
113 OPC_SDC2
= (0x3E << 26),
114 /* Compact Branches */
115 OPC_BLEZALC
= (0x06 << 26),
116 OPC_BGEZALC
= (0x06 << 26),
117 OPC_BGEUC
= (0x06 << 26),
118 OPC_BGTZALC
= (0x07 << 26),
119 OPC_BLTZALC
= (0x07 << 26),
120 OPC_BLTUC
= (0x07 << 26),
121 OPC_BOVC
= (0x08 << 26),
122 OPC_BEQZALC
= (0x08 << 26),
123 OPC_BEQC
= (0x08 << 26),
124 OPC_BLEZC
= (0x16 << 26),
125 OPC_BGEZC
= (0x16 << 26),
126 OPC_BGEC
= (0x16 << 26),
127 OPC_BGTZC
= (0x17 << 26),
128 OPC_BLTZC
= (0x17 << 26),
129 OPC_BLTC
= (0x17 << 26),
130 OPC_BNVC
= (0x18 << 26),
131 OPC_BNEZALC
= (0x18 << 26),
132 OPC_BNEC
= (0x18 << 26),
133 OPC_BC
= (0x32 << 26),
134 OPC_BEQZC
= (0x36 << 26),
135 OPC_JIC
= (0x36 << 26),
136 OPC_BALC
= (0x3A << 26),
137 OPC_BNEZC
= (0x3E << 26),
138 OPC_JIALC
= (0x3E << 26),
139 /* MDMX ASE specific */
140 OPC_MDMX
= (0x1E << 26),
141 /* MSA ASE, same as MDMX */
143 /* Cache and prefetch */
144 OPC_CACHE
= (0x2F << 26),
145 OPC_PREF
= (0x33 << 26),
146 /* PC-relative address computation / loads */
147 OPC_PCREL
= (0x3B << 26),
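/*
 * Illustrative decode sketch (not part of the original file): the translator
 * dispatches a fetched 32-bit instruction word on its top six bits, so a
 * decoder built on this table looks roughly like
 *
 *     uint32_t opc = ctx->opcode;
 *     switch (MASK_OP_MAJOR(opc)) {
 *     case OPC_SPECIAL:      // function field (bits 5..0) decodes further
 *         ...
 *         break;
 *     case OPC_ADDIU:        // rs/rt/imm16 format
 *         ...
 *         break;
 *     }
 *
 * Several enumerators above intentionally share a value (e.g. OPC_BLEZ and
 * OPC_BLEZALC); they cannot both be case labels and are told apart later
 * from the rs/rt fields depending on the ISA revision being translated.
 */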
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
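/*
 * Illustrative note (assumption about decoder shape, not original text): all
 * R6 PC-relative operations share major opcode 0x3B; bits 20..19 are enough
 * to pick out ADDIUPC/LWPC/LWUPC, while AUIPC and ALUIPC need the full rt
 * field (bits 20..16), which is why two masks are defined, e.g.
 *
 *     if (MASK_OPC_PCREL_TOP2BITS(opc) == OPC_ADDIUPC) {
 *         ...                                    // coarse, two-bit decode
 *     } else if (MASK_OPC_PCREL_TOP5BITS(opc) == OPC_AUIPC) {
 *         ...                                    // fine, five-bit decode
 *     }
 */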
/* MIPS special opcodes */
#define MASK_SPECIAL(op)            MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
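/*
 * Sketch (illustrative, not part of the original file): MASK_SPECIAL() keeps
 * only the major opcode and the function field, so encodings that reuse a
 * function value (SRL vs. ROTR, SRLV vs. ROTRV, ...) still compare equal at
 * this level and are separated afterwards on the extra bit recorded in the
 * table above, e.g.
 *
 *     if (MASK_SPECIAL(opc) == OPC_SRL) {
 *         bool is_rotr = opc & (1 << 21);   // OPC_ROTR = OPC_SRL | (1 << 21)
 *         ...
 *     }
 */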
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA  = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
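/*
 * Sketch (illustrative): MASK_R6_MULDIV() additionally keeps bits 10..0,
 * i.e. the function field plus the shift-amount field, so the R6 forms,
 * which reuse the legacy MULT/DIV opcode and function encodings, are
 * distinguished by a non-zero sa field:
 *
 *     if (MASK_R6_MULDIV(opc) == R6_OPC_MUL) {   // sa == 2
 *         ...
 *     } else if (MASK_SPECIAL(opc) == OPC_MULT) {
 *         ...                                    // legacy pre-R6 form
 *     }
 */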
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)      MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
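/*
 * Illustrative note (assumption, not original text): ALIGN/DALIGN encode
 * their byte-position operand in the low bits of the sa field, so
 * OPC_ALIGN..OPC_ALIGN_END (and OPC_DALIGN..OPC_DALIGN_END) describe a
 * range rather than a single value; a decoder checks membership in that
 * range rather than strict equality, roughly
 *
 *     uint32_t op = MASK_BSHFL(opc);
 *     if (op >= OPC_ALIGN && op <= OPC_ALIGN_END) {
 *         int bp = (opc >> 6) & 0x3;   // byte position
 *         ...
 *     }
 */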
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
475 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
477 /* MIPS DSP Arithmetic Sub-class */
478 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
484 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
492 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
494 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
496 /* MIPS DSP Multiply Sub-class insns */
497 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
502 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
505 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
506 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
521 /* MIPS DSP Multiply Sub-class insns */
522 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
523 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
524 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
525 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
528 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
544 /* DSP Bit/Manipulation Sub-class */
545 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
547 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
549 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
552 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
554 /* MIPS DSP Arithmetic Sub-class */
555 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
562 /* DSP Compare-Pick Sub-class */
563 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
582 /* MIPS DSP GPR-Based Shift Sub-class */
583 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
607 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
609 /* MIPS DSP Multiply Sub-class insns */
610 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
634 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
636 /* DSP Bit/Manipulation Sub-class */
637 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
640 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Append Sub-class */
643 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
644 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
645 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
648 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
650 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
651 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
662 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
663 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
664 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
665 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
666 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
667 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
670 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Arithmetic Sub-class */
673 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
690 /* DSP Bit/Manipulation Sub-class */
691 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
699 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
701 /* MIPS DSP Multiply Sub-class insns */
702 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
704 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
707 /* MIPS DSP Arithmetic Sub-class */
708 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
731 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
733 /* DSP Compare-Pick Sub-class */
734 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
753 /* MIPS DSP Arithmetic Sub-class */
754 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
764 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Append Sub-class */
767 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
768 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
769 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
770 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
773 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
775 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
776 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
796 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
799 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
801 /* DSP Bit/Manipulation Sub-class */
802 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
805 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
807 /* MIPS DSP Multiply Sub-class insns */
808 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
836 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP GPR-Based Shift Sub-class */
839 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
867 /* Coprocessor 0 (rs field) */
868 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
871 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
872 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
873 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
874 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
875 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
876 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
877 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
878 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
879 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
880 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
881 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
882 OPC_C0
= (0x10 << 21) | OPC_CP0
,
883 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
884 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
888 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
891 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
892 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
893 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
894 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
895 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
896 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
897 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
898 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
901 /* Coprocessor 0 (with rs == C0) */
902 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
905 OPC_TLBR
= 0x01 | OPC_C0
,
906 OPC_TLBWI
= 0x02 | OPC_C0
,
907 OPC_TLBINV
= 0x03 | OPC_C0
,
908 OPC_TLBINVF
= 0x04 | OPC_C0
,
909 OPC_TLBWR
= 0x06 | OPC_C0
,
910 OPC_TLBP
= 0x08 | OPC_C0
,
911 OPC_RFE
= 0x10 | OPC_C0
,
912 OPC_ERET
= 0x18 | OPC_C0
,
913 OPC_DERET
= 0x1F | OPC_C0
,
914 OPC_WAIT
= 0x20 | OPC_C0
,
917 /* Coprocessor 1 (rs field) */
918 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
920 /* Values for the fmt field in FP instructions */
922 /* 0 - 15 are reserved */
923 FMT_S
= 16, /* single fp */
924 FMT_D
= 17, /* double fp */
925 FMT_E
= 18, /* extended fp */
926 FMT_Q
= 19, /* quad fp */
927 FMT_W
= 20, /* 32-bit fixed */
928 FMT_L
= 21, /* 64-bit fixed */
929 FMT_PS
= 22, /* paired single fp */
930 /* 23 - 31 are reserved */
934 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
935 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
936 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
937 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
938 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
939 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
940 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
941 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
942 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
943 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
944 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
945 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
946 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
947 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
948 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
949 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
950 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
951 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
952 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
953 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
954 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
955 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
956 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
957 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
958 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
959 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
960 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
961 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
962 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
963 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
966 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
967 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
970 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
971 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
972 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
973 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
977 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
978 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
982 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
983 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
986 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
989 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
990 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
991 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
992 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
993 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
994 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
995 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
996 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
997 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
998 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
999 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1002 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1005 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1027 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1028 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1029 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1099 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1102 OPC_LWXC1
= 0x00 | OPC_CP3
,
1103 OPC_LDXC1
= 0x01 | OPC_CP3
,
1104 OPC_LUXC1
= 0x05 | OPC_CP3
,
1105 OPC_SWXC1
= 0x08 | OPC_CP3
,
1106 OPC_SDXC1
= 0x09 | OPC_CP3
,
1107 OPC_SUXC1
= 0x0D | OPC_CP3
,
1108 OPC_PREFX
= 0x0F | OPC_CP3
,
1109 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1110 OPC_MADD_S
= 0x20 | OPC_CP3
,
1111 OPC_MADD_D
= 0x21 | OPC_CP3
,
1112 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1113 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1114 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1115 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1116 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1117 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1118 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1119 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1120 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1121 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1125 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1127 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1128 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1129 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1130 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1131 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1132 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1133 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1134 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1135 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1136 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1137 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1138 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1139 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1140 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1141 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1142 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1143 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1144 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1145 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1146 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1147 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1149 /* MI10 instruction */
1150 OPC_LD_B
= (0x20) | OPC_MSA
,
1151 OPC_LD_H
= (0x21) | OPC_MSA
,
1152 OPC_LD_W
= (0x22) | OPC_MSA
,
1153 OPC_LD_D
= (0x23) | OPC_MSA
,
1154 OPC_ST_B
= (0x24) | OPC_MSA
,
1155 OPC_ST_H
= (0x25) | OPC_MSA
,
1156 OPC_ST_W
= (0x26) | OPC_MSA
,
1157 OPC_ST_D
= (0x27) | OPC_MSA
,
1161 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1162 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1163 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1164 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1165 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1166 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1167 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1168 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1169 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1170 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1171 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1172 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1173 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1175 /* I8 instruction */
1176 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1177 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1178 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1179 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1180 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1181 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1182 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1183 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1184 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1185 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1187 /* VEC/2R/2RF instruction */
1188 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1189 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1190 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1191 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1192 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1193 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1194 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1196 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1197 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1199 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1200 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1201 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1202 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1203 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1205 /* 2RF instruction df(bit 16) = _w, _d */
1206 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1207 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1208 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1209 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1210 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1211 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1212 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1213 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1214 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1215 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1216 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1217 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1218 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1219 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1220 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1221 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1223 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1224 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1225 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1226 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1227 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1228 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1229 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1230 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1231 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1232 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1233 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1234 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1235 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1236 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1237 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1238 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1239 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1240 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1241 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1242 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1243 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1244 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1245 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1246 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1247 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1248 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1249 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1250 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1251 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1252 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1253 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1254 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1255 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1256 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1257 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1258 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1259 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1260 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1261 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1262 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1263 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1264 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1265 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1266 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1267 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1268 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1269 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1270 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1271 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1272 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1273 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1274 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1275 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1276 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1277 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1278 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1279 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1280 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1281 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1282 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1283 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1284 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1285 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1286 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1288 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1289 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1290 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1291 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1293 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1295 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1296 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1297 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1299 /* 3RF instruction _df(bit 21) = _w, _d */
1300 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1303 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1304 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1305 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1306 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1307 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1308 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1309 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1310 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1311 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1312 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1313 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1316 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1317 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1318 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1319 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1320 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1321 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1322 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1323 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1327 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1332 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1338 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1340 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1342 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1343 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1344 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1345 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1346 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1347 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1348 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1349 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1350 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1351 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1352 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1353 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1354 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
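/*
 * Usage sketch (illustrative): these wrappers exist so callers can pass a
 * plain C integer to a helper that expects a TCGv_i32 constant without
 * repeating the allocate/call/free dance.  For example,
 *
 *     gen_helper_0e0i(raise_exception, excp);
 *
 * allocates a temporary i32 holding excp, emits the call
 * gen_helper_raise_exception(cpu_env, tmp) and frees the temporary, exactly
 * as generate_exception() below does.
 */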
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
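
/*
 * GPR 0 is the architectural zero register: reads through gen_load_gpr
 * yield the constant 0 and writes through gen_store_gpr are dropped, so
 * cpu_gpr[0] itself is never touched by these two helpers.
 */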
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
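
/*
 * The shadow-set address computed above comes from CP0 SRSCtl.PSS (the
 * "previous shadow set" field): it is scaled by the size of one 32-entry
 * bank of target_ulong GPRs and added to cpu_env, so the code relies on
 * the shadow register banks being laid out contiguously from the start of
 * CPUMIPSState.
 */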
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
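
/*
 * generate_exception_err()/generate_exception_end() save the CPU state and
 * mark the translation block as ending in an exception (BS_EXCP), whereas
 * generate_exception() only emits the raise_exception helper call and
 * leaves PC/hflags syncing and block termination to the caller.
 */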
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
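
/*
 * In the helpers above, when Status.FR is 0 (no MIPS_HFLAG_F64) a 64-bit
 * floating point value is split across an even/odd pair of 32-bit
 * registers: the even register holds bits 31..0 and the odd one holds
 * bits 63..32, which is why gen_load_fpr64/gen_store_fpr64 mask the
 * register number with ~1 and |1.
 */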
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}
/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * check_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
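
/*
 * Typical use: a double-precision three-operand instruction would call
 * check_cp1_registers(ctx, fd | fs | ft), so that any odd register number
 * in the set triggers the reserved instruction exception when FR=0.
 */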
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the
   instruction has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default:                                                                  \
        abort();                                                              \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
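
/*
 * Each FOP_CONDS expansion above defines one comparison emitter
 * (gen_cmp_d, gen_cmpabs_d, gen_cmp_s, ...) for the pre-R6 c.cond.fmt
 * instructions: the condition index n (0..15) selects one of the sixteen
 * predicate helpers, and cc names the FCC condition-code bit the helper
 * updates.
 */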
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); break; \
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); break; \
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); break; \
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); break;\
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); break; \
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); break;\
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); break; \
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); break;\
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); break;\
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); break;\
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); break;\
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break;\
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); break;\
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break;\
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); break;\
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break;\
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); break;\
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); break;\
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break;\
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); break;\
    default:                                                                  \
        abort();                                                              \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
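
/*
 * LL/SC emulation: in the CONFIG_USER_ONLY variants above, the linked
 * address and value are tracked in lladdr/llval directly, and SC records
 * the candidate store and raises EXCP_SC so the compare-and-store is
 * resolved outside generated code; in the softmmu variants the ll/sc
 * helpers do the whole job, taking the memory index as an immediate.
 */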
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
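
/*
 * pc_relative_pc() returns the base address used by PC-relative loads: if
 * the current instruction sits in a branch delay slot, the branch's own
 * address is used (hence the branch_bytes adjustment above), and the
 * result is then word-aligned (pc &= ~3).
 */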
2118 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2119 int rt
, int base
, int16_t offset
)
2123 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2124 /* Loongson CPU uses a load to zero register for prefetch.
2125 We emulate it as a NOP. On other CPU we must perform the
2126 actual memory access. */
2130 t0
= tcg_temp_new();
2131 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2134 #if defined(TARGET_MIPS64)
2136 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2137 ctx
->default_tcg_memop_mask
);
2138 gen_store_gpr(t0
, rt
);
2141 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2142 ctx
->default_tcg_memop_mask
);
2143 gen_store_gpr(t0
, rt
);
2147 op_ld_lld(t0
, t0
, ctx
);
2148 gen_store_gpr(t0
, rt
);
2151 t1
= tcg_temp_new();
2152 /* Do a byte access to possibly trigger a page
2153 fault with the unaligned address. */
2154 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2155 tcg_gen_andi_tl(t1
, t0
, 7);
2156 #ifndef TARGET_WORDS_BIGENDIAN
2157 tcg_gen_xori_tl(t1
, t1
, 7);
2159 tcg_gen_shli_tl(t1
, t1
, 3);
2160 tcg_gen_andi_tl(t0
, t0
, ~7);
2161 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2162 tcg_gen_shl_tl(t0
, t0
, t1
);
2163 t2
= tcg_const_tl(-1);
2164 tcg_gen_shl_tl(t2
, t2
, t1
);
2165 gen_load_gpr(t1
, rt
);
2166 tcg_gen_andc_tl(t1
, t1
, t2
);
2168 tcg_gen_or_tl(t0
, t0
, t1
);
2170 gen_store_gpr(t0
, rt
);
2173 t1
= tcg_temp_new();
2174 /* Do a byte access to possibly trigger a page
2175 fault with the unaligned address. */
2176 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2177 tcg_gen_andi_tl(t1
, t0
, 7);
2178 #ifdef TARGET_WORDS_BIGENDIAN
2179 tcg_gen_xori_tl(t1
, t1
, 7);
2181 tcg_gen_shli_tl(t1
, t1
, 3);
2182 tcg_gen_andi_tl(t0
, t0
, ~7);
2183 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2184 tcg_gen_shr_tl(t0
, t0
, t1
);
2185 tcg_gen_xori_tl(t1
, t1
, 63);
2186 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2187 tcg_gen_shl_tl(t2
, t2
, t1
);
2188 gen_load_gpr(t1
, rt
);
2189 tcg_gen_and_tl(t1
, t1
, t2
);
2191 tcg_gen_or_tl(t0
, t0
, t1
);
2193 gen_store_gpr(t0
, rt
);
2196 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2197 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2200 gen_store_gpr(t0
, rt
);
2204 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2205 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2207 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2208 gen_store_gpr(t0
, rt
);
2211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2212 ctx
->default_tcg_memop_mask
);
2213 gen_store_gpr(t0
, rt
);
2216 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2217 ctx
->default_tcg_memop_mask
);
2218 gen_store_gpr(t0
, rt
);
2221 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2222 ctx
->default_tcg_memop_mask
);
2223 gen_store_gpr(t0
, rt
);
2226 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2227 gen_store_gpr(t0
, rt
);
2230 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2231 gen_store_gpr(t0
, rt
);
2234 t1
= tcg_temp_new();
2235 /* Do a byte access to possibly trigger a page
2236 fault with the unaligned address. */
2237 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2238 tcg_gen_andi_tl(t1
, t0
, 3);
2239 #ifndef TARGET_WORDS_BIGENDIAN
2240 tcg_gen_xori_tl(t1
, t1
, 3);
2242 tcg_gen_shli_tl(t1
, t1
, 3);
2243 tcg_gen_andi_tl(t0
, t0
, ~3);
2244 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2245 tcg_gen_shl_tl(t0
, t0
, t1
);
2246 t2
= tcg_const_tl(-1);
2247 tcg_gen_shl_tl(t2
, t2
, t1
);
2248 gen_load_gpr(t1
, rt
);
2249 tcg_gen_andc_tl(t1
, t1
, t2
);
2251 tcg_gen_or_tl(t0
, t0
, t1
);
2253 tcg_gen_ext32s_tl(t0
, t0
);
2254 gen_store_gpr(t0
, rt
);
2257 t1
= tcg_temp_new();
2258 /* Do a byte access to possibly trigger a page
2259 fault with the unaligned address. */
2260 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2261 tcg_gen_andi_tl(t1
, t0
, 3);
2262 #ifdef TARGET_WORDS_BIGENDIAN
2263 tcg_gen_xori_tl(t1
, t1
, 3);
2265 tcg_gen_shli_tl(t1
, t1
, 3);
2266 tcg_gen_andi_tl(t0
, t0
, ~3);
2267 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2268 tcg_gen_shr_tl(t0
, t0
, t1
);
2269 tcg_gen_xori_tl(t1
, t1
, 31);
2270 t2
= tcg_const_tl(0xfffffffeull
);
2271 tcg_gen_shl_tl(t2
, t2
, t1
);
2272 gen_load_gpr(t1
, rt
);
2273 tcg_gen_and_tl(t1
, t1
, t2
);
2275 tcg_gen_or_tl(t0
, t0
, t1
);
2277 tcg_gen_ext32s_tl(t0
, t0
);
2278 gen_store_gpr(t0
, rt
);
2282 op_ld_ll(t0
, t0
, ctx
);
2283 gen_store_gpr(t0
, rt
);
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SD:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SDL:
        gen_helper_0e2i(sdl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SDR:
        gen_helper_0e2i(sdr, t1, t0, ctx->mem_idx);
        break;
#endif
    case OPC_SW:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SH:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SB:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_8);
        break;
    case OPC_SWL:
        gen_helper_0e2i(swl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SWR:
        gen_helper_0e2i(swr, t1, t0, ctx->mem_idx);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
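
/*
 * In gen_st() above, the left/right partial stores (SWL/SWR and SDL/SDR)
 * go through helpers rather than a plain tcg_gen_qemu_st_tl(), because
 * they merge a variable number of bytes into memory depending on the low
 * address bits, which is simpler to handle in a C helper.
 */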
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    TCGv t0, t1;

#ifdef CONFIG_USER_ONLY
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
#else
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
#endif
    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SCD:
        op_st_scd(t1, t0, rt, ctx);
        break;
#endif
    case OPC_SC:
        op_st_sc(t1, t0, rt, ctx);
        break;
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
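
/*
 * The CONFIG_USER_ONLY build uses local temporaries here because the
 * op_st_sc/op_st_scd expansion (see OP_ST_ATOMIC above) branches over
 * labels, and ordinary TCG temporaries do not keep their value across a
 * branch.
 */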
2365 /* Load and store */
2366 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2367 int base
, int16_t offset
)
2369 TCGv t0
= tcg_temp_new();
2371 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2372 /* Don't do NOP if destination is zero: we must perform the actual
2377 TCGv_i32 fp0
= tcg_temp_new_i32();
2378 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2379 ctx
->default_tcg_memop_mask
);
2380 gen_store_fpr32(ctx
, fp0
, ft
);
2381 tcg_temp_free_i32(fp0
);
2386 TCGv_i32 fp0
= tcg_temp_new_i32();
2387 gen_load_fpr32(ctx
, fp0
, ft
);
2388 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2389 ctx
->default_tcg_memop_mask
);
2390 tcg_temp_free_i32(fp0
);
2395 TCGv_i64 fp0
= tcg_temp_new_i64();
2396 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2397 ctx
->default_tcg_memop_mask
);
2398 gen_store_fpr64(ctx
, fp0
, ft
);
2399 tcg_temp_free_i64(fp0
);
2404 TCGv_i64 fp0
= tcg_temp_new_i64();
2405 gen_load_fpr64(ctx
, fp0
, ft
);
2406 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2407 ctx
->default_tcg_memop_mask
);
2408 tcg_temp_free_i64(fp0
);
2412 MIPS_INVAL("flt_ldst");
2413 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        switch (op) {
        case OPC_LDC1:
        case OPC_SDC1:
            check_insn(ctx, ISA_MIPS2);
            /* Fallthrough */
        default:
            gen_flt_ldst(ctx, op, rt, rs, imm);
        }
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
2438 /* Arithmetic with immediate operand */
2439 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2440 int rt
, int rs
, int16_t imm
)
2442 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2444 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2445 /* If no destination, treat it as a NOP.
2446 For addi, we must generate the overflow exception when needed. */
2452 TCGv t0
= tcg_temp_local_new();
2453 TCGv t1
= tcg_temp_new();
2454 TCGv t2
= tcg_temp_new();
2455 TCGLabel
*l1
= gen_new_label();
2457 gen_load_gpr(t1
, rs
);
2458 tcg_gen_addi_tl(t0
, t1
, uimm
);
2459 tcg_gen_ext32s_tl(t0
, t0
);
2461 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2462 tcg_gen_xori_tl(t2
, t0
, uimm
);
2463 tcg_gen_and_tl(t1
, t1
, t2
);
2465 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2467 /* operands of same sign, result different sign */
2468 generate_exception(ctx
, EXCP_OVERFLOW
);
2470 tcg_gen_ext32s_tl(t0
, t0
);
2471 gen_store_gpr(t0
, rt
);
2477 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2478 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2480 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2483 #if defined(TARGET_MIPS64)
2486 TCGv t0
= tcg_temp_local_new();
2487 TCGv t1
= tcg_temp_new();
2488 TCGv t2
= tcg_temp_new();
2489 TCGLabel
*l1
= gen_new_label();
2491 gen_load_gpr(t1
, rs
);
2492 tcg_gen_addi_tl(t0
, t1
, uimm
);
2494 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2495 tcg_gen_xori_tl(t2
, t0
, uimm
);
2496 tcg_gen_and_tl(t1
, t1
, t2
);
2498 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2500 /* operands of same sign, result different sign */
2501 generate_exception(ctx
, EXCP_OVERFLOW
);
2503 gen_store_gpr(t0
, rt
);
2509 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2511 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2518 /* Logic with immediate operand */
2519 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2520 int rt
, int rs
, int16_t imm
)
2525 /* If no destination, treat it as a NOP. */
2528 uimm
= (uint16_t)imm
;
2531 if (likely(rs
!= 0))
2532 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2534 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2538 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2540 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2543 if (likely(rs
!= 0))
2544 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2546 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2549 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2551 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2552 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2554 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    tcg_temp_free(t0);
}
2587 /* Shifts with immediate operand */
2588 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2589 int rt
, int rs
, int16_t imm
)
2591 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2595 /* If no destination, treat it as a NOP. */
2599 t0
= tcg_temp_new();
2600 gen_load_gpr(t0
, rs
);
2603 tcg_gen_shli_tl(t0
, t0
, uimm
);
2604 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2607 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2611 tcg_gen_ext32u_tl(t0
, t0
);
2612 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2614 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2619 TCGv_i32 t1
= tcg_temp_new_i32();
2621 tcg_gen_trunc_tl_i32(t1
, t0
);
2622 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2623 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2624 tcg_temp_free_i32(t1
);
2626 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2629 #if defined(TARGET_MIPS64)
2631 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2634 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2637 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2643 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2647 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2650 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2653 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2664 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2665 int rd
, int rs
, int rt
)
2667 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2668 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2669 /* If no destination, treat it as a NOP.
2670 For add & sub, we must generate the overflow exception when needed. */
2677 TCGv t0
= tcg_temp_local_new();
2678 TCGv t1
= tcg_temp_new();
2679 TCGv t2
= tcg_temp_new();
2680 TCGLabel
*l1
= gen_new_label();
2682 gen_load_gpr(t1
, rs
);
2683 gen_load_gpr(t2
, rt
);
2684 tcg_gen_add_tl(t0
, t1
, t2
);
2685 tcg_gen_ext32s_tl(t0
, t0
);
2686 tcg_gen_xor_tl(t1
, t1
, t2
);
2687 tcg_gen_xor_tl(t2
, t0
, t2
);
2688 tcg_gen_andc_tl(t1
, t2
, t1
);
2690 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2692 /* operands of same sign, result different sign */
2693 generate_exception(ctx
, EXCP_OVERFLOW
);
2695 gen_store_gpr(t0
, rd
);
2700 if (rs
!= 0 && rt
!= 0) {
2701 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2702 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2703 } else if (rs
== 0 && rt
!= 0) {
2704 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2705 } else if (rs
!= 0 && rt
== 0) {
2706 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2708 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2713 TCGv t0
= tcg_temp_local_new();
2714 TCGv t1
= tcg_temp_new();
2715 TCGv t2
= tcg_temp_new();
2716 TCGLabel
*l1
= gen_new_label();
2718 gen_load_gpr(t1
, rs
);
2719 gen_load_gpr(t2
, rt
);
2720 tcg_gen_sub_tl(t0
, t1
, t2
);
2721 tcg_gen_ext32s_tl(t0
, t0
);
2722 tcg_gen_xor_tl(t2
, t1
, t2
);
2723 tcg_gen_xor_tl(t1
, t0
, t1
);
2724 tcg_gen_and_tl(t1
, t1
, t2
);
2726 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2728 /* operands of different sign, first operand and result different sign */
2729 generate_exception(ctx
, EXCP_OVERFLOW
);
2731 gen_store_gpr(t0
, rd
);
2736 if (rs
!= 0 && rt
!= 0) {
2737 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2738 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2739 } else if (rs
== 0 && rt
!= 0) {
2740 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2741 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2742 } else if (rs
!= 0 && rt
== 0) {
2743 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2745 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2748 #if defined(TARGET_MIPS64)
2751 TCGv t0
= tcg_temp_local_new();
2752 TCGv t1
= tcg_temp_new();
2753 TCGv t2
= tcg_temp_new();
2754 TCGLabel
*l1
= gen_new_label();
2756 gen_load_gpr(t1
, rs
);
2757 gen_load_gpr(t2
, rt
);
2758 tcg_gen_add_tl(t0
, t1
, t2
);
2759 tcg_gen_xor_tl(t1
, t1
, t2
);
2760 tcg_gen_xor_tl(t2
, t0
, t2
);
2761 tcg_gen_andc_tl(t1
, t2
, t1
);
2763 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2765 /* operands of same sign, result different sign */
2766 generate_exception(ctx
, EXCP_OVERFLOW
);
2768 gen_store_gpr(t0
, rd
);
2773 if (rs
!= 0 && rt
!= 0) {
2774 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2775 } else if (rs
== 0 && rt
!= 0) {
2776 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2777 } else if (rs
!= 0 && rt
== 0) {
2778 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2785 TCGv t0
= tcg_temp_local_new();
2786 TCGv t1
= tcg_temp_new();
2787 TCGv t2
= tcg_temp_new();
2788 TCGLabel
*l1
= gen_new_label();
2790 gen_load_gpr(t1
, rs
);
2791 gen_load_gpr(t2
, rt
);
2792 tcg_gen_sub_tl(t0
, t1
, t2
);
2793 tcg_gen_xor_tl(t2
, t1
, t2
);
2794 tcg_gen_xor_tl(t1
, t0
, t1
);
2795 tcg_gen_and_tl(t1
, t1
, t2
);
2797 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2799 /* operands of different sign, first operand and result different sign */
2800 generate_exception(ctx
, EXCP_OVERFLOW
);
2802 gen_store_gpr(t0
, rd
);
2807 if (rs
!= 0 && rt
!= 0) {
2808 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2809 } else if (rs
== 0 && rt
!= 0) {
2810 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2811 } else if (rs
!= 0 && rt
== 0) {
2812 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2814 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2819 if (likely(rs
!= 0 && rt
!= 0)) {
2820 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2821 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2823 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Conditional move */
static void gen_cond_move(DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
{
    TCGv t0, t1, t2;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    t1 = tcg_const_tl(0);
    t2 = tcg_temp_new();
    gen_load_gpr(t2, rs);
    switch (opc) {
    case OPC_MOVN:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_MOVZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_SELNEZ:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    case OPC_SELEQZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    }
    tcg_temp_free(t2);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
2865 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2866 int rd
, int rs
, int rt
)
2869 /* If no destination, treat it as a NOP. */
2875 if (likely(rs
!= 0 && rt
!= 0)) {
2876 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2878 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2882 if (rs
!= 0 && rt
!= 0) {
2883 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2884 } else if (rs
== 0 && rt
!= 0) {
2885 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2886 } else if (rs
!= 0 && rt
== 0) {
2887 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2889 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2893 if (likely(rs
!= 0 && rt
!= 0)) {
2894 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2895 } else if (rs
== 0 && rt
!= 0) {
2896 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2897 } else if (rs
!= 0 && rt
== 0) {
2898 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2900 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2904 if (likely(rs
!= 0 && rt
!= 0)) {
2905 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2906 } else if (rs
== 0 && rt
!= 0) {
2907 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2908 } else if (rs
!= 0 && rt
== 0) {
2909 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2911 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
2945 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2946 int rd
, int rs
, int rt
)
2951 /* If no destination, treat it as a NOP.
2952 For add & sub, we must generate the overflow exception when needed. */
2956 t0
= tcg_temp_new();
2957 t1
= tcg_temp_new();
2958 gen_load_gpr(t0
, rs
);
2959 gen_load_gpr(t1
, rt
);
2962 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2963 tcg_gen_shl_tl(t0
, t1
, t0
);
2964 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2967 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2968 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2971 tcg_gen_ext32u_tl(t1
, t1
);
2972 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2973 tcg_gen_shr_tl(t0
, t1
, t0
);
2974 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2978 TCGv_i32 t2
= tcg_temp_new_i32();
2979 TCGv_i32 t3
= tcg_temp_new_i32();
2981 tcg_gen_trunc_tl_i32(t2
, t0
);
2982 tcg_gen_trunc_tl_i32(t3
, t1
);
2983 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2984 tcg_gen_rotr_i32(t2
, t3
, t2
);
2985 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2986 tcg_temp_free_i32(t2
);
2987 tcg_temp_free_i32(t3
);
2990 #if defined(TARGET_MIPS64)
2992 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2993 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3000 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3001 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3004 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3005 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3013 /* Arithmetic on HI/LO registers */
3014 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3016 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3027 #if defined(TARGET_MIPS64)
3029 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3033 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3037 #if defined(TARGET_MIPS64)
3039 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3043 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3048 #if defined(TARGET_MIPS64)
3050 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3054 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3057 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3062 #if defined(TARGET_MIPS64)
3064 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3068 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3071 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
static inline void gen_r6_ld(target_long addr, int reg, int memidx,
                             TCGMemOp memop)
{
    TCGv t0 = tcg_const_tl(addr);

    tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
    gen_store_gpr(t0, reg);
    tcg_temp_free(t0);
}
3086 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3092 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3095 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3096 addr
= addr_add(ctx
, pc
, offset
);
3097 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3101 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3102 addr
= addr_add(ctx
, pc
, offset
);
3103 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3105 #if defined(TARGET_MIPS64)
3108 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3109 addr
= addr_add(ctx
, pc
, offset
);
3110 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3114 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3117 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3118 addr
= addr_add(ctx
, pc
, offset
);
3119 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3124 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3125 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3126 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3129 #if defined(TARGET_MIPS64)
3130 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3131 case R6_OPC_LDPC
+ (1 << 16):
3132 case R6_OPC_LDPC
+ (2 << 16):
3133 case R6_OPC_LDPC
+ (3 << 16):
3135 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3136 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3137 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3141 MIPS_INVAL("OPC_PCREL");
3142 generate_exception_end(ctx
, EXCP_RI
);
3149 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3158 t0
= tcg_temp_new();
3159 t1
= tcg_temp_new();
3161 gen_load_gpr(t0
, rs
);
3162 gen_load_gpr(t1
, rt
);
3167 TCGv t2
= tcg_temp_new();
3168 TCGv t3
= tcg_temp_new();
3169 tcg_gen_ext32s_tl(t0
, t0
);
3170 tcg_gen_ext32s_tl(t1
, t1
);
3171 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3172 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3173 tcg_gen_and_tl(t2
, t2
, t3
);
3174 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3175 tcg_gen_or_tl(t2
, t2
, t3
);
3176 tcg_gen_movi_tl(t3
, 0);
3177 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3178 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3179 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3186 TCGv t2
= tcg_temp_new();
3187 TCGv t3
= tcg_temp_new();
3188 tcg_gen_ext32s_tl(t0
, t0
);
3189 tcg_gen_ext32s_tl(t1
, t1
);
3190 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3191 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3192 tcg_gen_and_tl(t2
, t2
, t3
);
3193 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3194 tcg_gen_or_tl(t2
, t2
, t3
);
3195 tcg_gen_movi_tl(t3
, 0);
3196 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3197 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3198 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3205 TCGv t2
= tcg_const_tl(0);
3206 TCGv t3
= tcg_const_tl(1);
3207 tcg_gen_ext32u_tl(t0
, t0
);
3208 tcg_gen_ext32u_tl(t1
, t1
);
3209 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3210 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3211 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3218 TCGv t2
= tcg_const_tl(0);
3219 TCGv t3
= tcg_const_tl(1);
3220 tcg_gen_ext32u_tl(t0
, t0
);
3221 tcg_gen_ext32u_tl(t1
, t1
);
3222 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3223 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3224 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3231 TCGv_i32 t2
= tcg_temp_new_i32();
3232 TCGv_i32 t3
= tcg_temp_new_i32();
3233 tcg_gen_trunc_tl_i32(t2
, t0
);
3234 tcg_gen_trunc_tl_i32(t3
, t1
);
3235 tcg_gen_mul_i32(t2
, t2
, t3
);
3236 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3237 tcg_temp_free_i32(t2
);
3238 tcg_temp_free_i32(t3
);
3243 TCGv_i32 t2
= tcg_temp_new_i32();
3244 TCGv_i32 t3
= tcg_temp_new_i32();
3245 tcg_gen_trunc_tl_i32(t2
, t0
);
3246 tcg_gen_trunc_tl_i32(t3
, t1
);
3247 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3248 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3249 tcg_temp_free_i32(t2
);
3250 tcg_temp_free_i32(t3
);
3255 TCGv_i32 t2
= tcg_temp_new_i32();
3256 TCGv_i32 t3
= tcg_temp_new_i32();
3257 tcg_gen_trunc_tl_i32(t2
, t0
);
3258 tcg_gen_trunc_tl_i32(t3
, t1
);
3259 tcg_gen_mul_i32(t2
, t2
, t3
);
3260 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3261 tcg_temp_free_i32(t2
);
3262 tcg_temp_free_i32(t3
);
3267 TCGv_i32 t2
= tcg_temp_new_i32();
3268 TCGv_i32 t3
= tcg_temp_new_i32();
3269 tcg_gen_trunc_tl_i32(t2
, t0
);
3270 tcg_gen_trunc_tl_i32(t3
, t1
);
3271 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3272 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3273 tcg_temp_free_i32(t2
);
3274 tcg_temp_free_i32(t3
);
3277 #if defined(TARGET_MIPS64)
3280 TCGv t2
= tcg_temp_new();
3281 TCGv t3
= tcg_temp_new();
3282 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3283 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3284 tcg_gen_and_tl(t2
, t2
, t3
);
3285 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3286 tcg_gen_or_tl(t2
, t2
, t3
);
3287 tcg_gen_movi_tl(t3
, 0);
3288 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3289 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3296 TCGv t2
= tcg_temp_new();
3297 TCGv t3
= tcg_temp_new();
3298 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3299 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3300 tcg_gen_and_tl(t2
, t2
, t3
);
3301 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3302 tcg_gen_or_tl(t2
, t2
, t3
);
3303 tcg_gen_movi_tl(t3
, 0);
3304 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3305 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3312 TCGv t2
= tcg_const_tl(0);
3313 TCGv t3
= tcg_const_tl(1);
3314 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3315 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3322 TCGv t2
= tcg_const_tl(0);
3323 TCGv t3
= tcg_const_tl(1);
3324 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3325 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3331 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3335 TCGv t2
= tcg_temp_new();
3336 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3341 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3345 TCGv t2
= tcg_temp_new();
3346 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3352 MIPS_INVAL("r6 mul/div");
3353 generate_exception_end(ctx
, EXCP_RI
);
3361 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3362 int acc
, int rs
, int rt
)
3366 t0
= tcg_temp_new();
3367 t1
= tcg_temp_new();
3369 gen_load_gpr(t0
, rs
);
3370 gen_load_gpr(t1
, rt
);
3379 TCGv t2
= tcg_temp_new();
3380 TCGv t3
= tcg_temp_new();
3381 tcg_gen_ext32s_tl(t0
, t0
);
3382 tcg_gen_ext32s_tl(t1
, t1
);
3383 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3384 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3385 tcg_gen_and_tl(t2
, t2
, t3
);
3386 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3387 tcg_gen_or_tl(t2
, t2
, t3
);
3388 tcg_gen_movi_tl(t3
, 0);
3389 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3390 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3391 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3392 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3393 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3400 TCGv t2
= tcg_const_tl(0);
3401 TCGv t3
= tcg_const_tl(1);
3402 tcg_gen_ext32u_tl(t0
, t0
);
3403 tcg_gen_ext32u_tl(t1
, t1
);
3404 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3405 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3406 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3407 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3408 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3415 TCGv_i32 t2
= tcg_temp_new_i32();
3416 TCGv_i32 t3
= tcg_temp_new_i32();
3417 tcg_gen_trunc_tl_i32(t2
, t0
);
3418 tcg_gen_trunc_tl_i32(t3
, t1
);
3419 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3420 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3421 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3422 tcg_temp_free_i32(t2
);
3423 tcg_temp_free_i32(t3
);
3428 TCGv_i32 t2
= tcg_temp_new_i32();
3429 TCGv_i32 t3
= tcg_temp_new_i32();
3430 tcg_gen_trunc_tl_i32(t2
, t0
);
3431 tcg_gen_trunc_tl_i32(t3
, t1
);
3432 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3433 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3434 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3435 tcg_temp_free_i32(t2
);
3436 tcg_temp_free_i32(t3
);
3439 #if defined(TARGET_MIPS64)
3442 TCGv t2
= tcg_temp_new();
3443 TCGv t3
= tcg_temp_new();
3444 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3445 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3446 tcg_gen_and_tl(t2
, t2
, t3
);
3447 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3448 tcg_gen_or_tl(t2
, t2
, t3
);
3449 tcg_gen_movi_tl(t3
, 0);
3450 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3451 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3452 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3459 TCGv t2
= tcg_const_tl(0);
3460 TCGv t3
= tcg_const_tl(1);
3461 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3462 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3463 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3469 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3472 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3477 TCGv_i64 t2
= tcg_temp_new_i64();
3478 TCGv_i64 t3
= tcg_temp_new_i64();
3480 tcg_gen_ext_tl_i64(t2
, t0
);
3481 tcg_gen_ext_tl_i64(t3
, t1
);
3482 tcg_gen_mul_i64(t2
, t2
, t3
);
3483 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3484 tcg_gen_add_i64(t2
, t2
, t3
);
3485 tcg_temp_free_i64(t3
);
3486 gen_move_low32(cpu_LO
[acc
], t2
);
3487 gen_move_high32(cpu_HI
[acc
], t2
);
3488 tcg_temp_free_i64(t2
);
3493 TCGv_i64 t2
= tcg_temp_new_i64();
3494 TCGv_i64 t3
= tcg_temp_new_i64();
3496 tcg_gen_ext32u_tl(t0
, t0
);
3497 tcg_gen_ext32u_tl(t1
, t1
);
3498 tcg_gen_extu_tl_i64(t2
, t0
);
3499 tcg_gen_extu_tl_i64(t3
, t1
);
3500 tcg_gen_mul_i64(t2
, t2
, t3
);
3501 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3502 tcg_gen_add_i64(t2
, t2
, t3
);
3503 tcg_temp_free_i64(t3
);
3504 gen_move_low32(cpu_LO
[acc
], t2
);
3505 gen_move_high32(cpu_HI
[acc
], t2
);
3506 tcg_temp_free_i64(t2
);
3511 TCGv_i64 t2
= tcg_temp_new_i64();
3512 TCGv_i64 t3
= tcg_temp_new_i64();
3514 tcg_gen_ext_tl_i64(t2
, t0
);
3515 tcg_gen_ext_tl_i64(t3
, t1
);
3516 tcg_gen_mul_i64(t2
, t2
, t3
);
3517 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3518 tcg_gen_sub_i64(t2
, t3
, t2
);
3519 tcg_temp_free_i64(t3
);
3520 gen_move_low32(cpu_LO
[acc
], t2
);
3521 gen_move_high32(cpu_HI
[acc
], t2
);
3522 tcg_temp_free_i64(t2
);
3527 TCGv_i64 t2
= tcg_temp_new_i64();
3528 TCGv_i64 t3
= tcg_temp_new_i64();
3530 tcg_gen_ext32u_tl(t0
, t0
);
3531 tcg_gen_ext32u_tl(t1
, t1
);
3532 tcg_gen_extu_tl_i64(t2
, t0
);
3533 tcg_gen_extu_tl_i64(t3
, t1
);
3534 tcg_gen_mul_i64(t2
, t2
, t3
);
3535 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3536 tcg_gen_sub_i64(t2
, t3
, t2
);
3537 tcg_temp_free_i64(t3
);
3538 gen_move_low32(cpu_LO
[acc
], t2
);
3539 gen_move_high32(cpu_HI
[acc
], t2
);
3540 tcg_temp_free_i64(t2
);
3544 MIPS_INVAL("mul/div");
3545 generate_exception_end(ctx
, EXCP_RI
);
3553 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3554 int rd
, int rs
, int rt
)
3556 TCGv t0
= tcg_temp_new();
3557 TCGv t1
= tcg_temp_new();
3559 gen_load_gpr(t0
, rs
);
3560 gen_load_gpr(t1
, rt
);
3563 case OPC_VR54XX_MULS
:
3564 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3566 case OPC_VR54XX_MULSU
:
3567 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3569 case OPC_VR54XX_MACC
:
3570 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3572 case OPC_VR54XX_MACCU
:
3573 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3575 case OPC_VR54XX_MSAC
:
3576 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3578 case OPC_VR54XX_MSACU
:
3579 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3581 case OPC_VR54XX_MULHI
:
3582 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3584 case OPC_VR54XX_MULHIU
:
3585 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3587 case OPC_VR54XX_MULSHI
:
3588 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3590 case OPC_VR54XX_MULSHIU
:
3591 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3593 case OPC_VR54XX_MACCHI
:
3594 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3596 case OPC_VR54XX_MACCHIU
:
3597 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3599 case OPC_VR54XX_MSACHI
:
3600 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3602 case OPC_VR54XX_MSACHIU
:
3603 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3606 MIPS_INVAL("mul vr54xx");
3607 generate_exception_end(ctx
, EXCP_RI
);
3610 gen_store_gpr(t0
, rd
);
3617 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3626 t0
= tcg_temp_new();
3627 gen_load_gpr(t0
, rs
);
3631 gen_helper_clo(cpu_gpr
[rd
], t0
);
3635 gen_helper_clz(cpu_gpr
[rd
], t0
);
3637 #if defined(TARGET_MIPS64)
3640 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3644 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3651 /* Godson integer instructions */
3652 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3653 int rd
, int rs
, int rt
)
3665 case OPC_MULTU_G_2E
:
3666 case OPC_MULTU_G_2F
:
3667 #if defined(TARGET_MIPS64)
3668 case OPC_DMULT_G_2E
:
3669 case OPC_DMULT_G_2F
:
3670 case OPC_DMULTU_G_2E
:
3671 case OPC_DMULTU_G_2F
:
3673 t0
= tcg_temp_new();
3674 t1
= tcg_temp_new();
3677 t0
= tcg_temp_local_new();
3678 t1
= tcg_temp_local_new();
3682 gen_load_gpr(t0
, rs
);
3683 gen_load_gpr(t1
, rt
);
3688 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3689 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3691 case OPC_MULTU_G_2E
:
3692 case OPC_MULTU_G_2F
:
3693 tcg_gen_ext32u_tl(t0
, t0
);
3694 tcg_gen_ext32u_tl(t1
, t1
);
3695 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3696 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3701 TCGLabel
*l1
= gen_new_label();
3702 TCGLabel
*l2
= gen_new_label();
3703 TCGLabel
*l3
= gen_new_label();
3704 tcg_gen_ext32s_tl(t0
, t0
);
3705 tcg_gen_ext32s_tl(t1
, t1
);
3706 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3707 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3710 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3711 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3712 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3715 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3716 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3723 TCGLabel
*l1
= gen_new_label();
3724 TCGLabel
*l2
= gen_new_label();
3725 tcg_gen_ext32u_tl(t0
, t0
);
3726 tcg_gen_ext32u_tl(t1
, t1
);
3727 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3728 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3731 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3732 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3739 TCGLabel
*l1
= gen_new_label();
3740 TCGLabel
*l2
= gen_new_label();
3741 TCGLabel
*l3
= gen_new_label();
3742 tcg_gen_ext32u_tl(t0
, t0
);
3743 tcg_gen_ext32u_tl(t1
, t1
);
3744 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3745 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3746 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3748 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3751 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3752 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3759 TCGLabel
*l1
= gen_new_label();
3760 TCGLabel
*l2
= gen_new_label();
3761 tcg_gen_ext32u_tl(t0
, t0
);
3762 tcg_gen_ext32u_tl(t1
, t1
);
3763 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3764 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3767 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3768 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3772 #if defined(TARGET_MIPS64)
3773 case OPC_DMULT_G_2E
:
3774 case OPC_DMULT_G_2F
:
3775 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3777 case OPC_DMULTU_G_2E
:
3778 case OPC_DMULTU_G_2F
:
3779 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3784 TCGLabel
*l1
= gen_new_label();
3785 TCGLabel
*l2
= gen_new_label();
3786 TCGLabel
*l3
= gen_new_label();
3787 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3788 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3791 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3793 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3796 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3800 case OPC_DDIVU_G_2E
:
3801 case OPC_DDIVU_G_2F
:
3803 TCGLabel
*l1
= gen_new_label();
3804 TCGLabel
*l2
= gen_new_label();
3805 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3809 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3816 TCGLabel
*l1
= gen_new_label();
3817 TCGLabel
*l2
= gen_new_label();
3818 TCGLabel
*l3
= gen_new_label();
3819 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3820 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3821 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3823 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3826 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3830 case OPC_DMODU_G_2E
:
3831 case OPC_DMODU_G_2F
:
3833 TCGLabel
*l1
= gen_new_label();
3834 TCGLabel
*l2
= gen_new_label();
3835 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3836 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3839 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3850 /* Loongson multimedia instructions */
3851 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3853 uint32_t opc
, shift_max
;
3856 opc
= MASK_LMI(ctx
->opcode
);
3862 t0
= tcg_temp_local_new_i64();
3863 t1
= tcg_temp_local_new_i64();
3866 t0
= tcg_temp_new_i64();
3867 t1
= tcg_temp_new_i64();
3871 gen_load_fpr64(ctx
, t0
, rs
);
3872 gen_load_fpr64(ctx
, t1
, rt
);
#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR, or, or);
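    /*
     * The LMI_* macros above expand each Loongson SIMD opcode into a single
     * switch case on the two 64-bit operands already loaded into t0/t1:
     * LMI_HELPER calls a binary out-of-line helper, LMI_HELPER_1 a unary
     * helper, and LMI_DIRECT emits the named TCG i64 operation inline.
     */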
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        tcg_gen_shl_i64(t0, t0, t1);

        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same. */
        tcg_gen_sar_i64(t0, t0, t1);

        /* We want to shift in zeros for SRL; zero-extend first. */
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
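        /*
         * The setcond/neg/and sequence above is the usual TCG idiom for the
         * "shifts larger than MAX produce zero" rule: setcond leaves 1 in t1
         * when the count is below shift_max and 0 otherwise, negation turns
         * that flag into an all-ones or all-zeros mask, and the final AND
         * keeps or clears the shifted result accordingly.
         */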
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
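            /*
             * Overflow check for the (D)ADD_CP2 cases above: t1 ^ t2 has its
             * sign bit set when the two addends differ in sign, and t2 ^ t0
             * when the sum differs in sign from the saved addend, so the
             * andc leaves the sign bit of t1 set exactly when both inputs
             * shared a sign and the result flipped it, i.e. on signed
             * overflow.  The TCG_COND_GE branch then skips
             * generate_exception in the non-overflow case.
             */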
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field? */

        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
    /* Compare two registers */
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    /* Compare register to immediate */
    if (rs != 0 || imm != 0) {
        gen_load_gpr(t0, rs);
        tcg_gen_movi_tl(t1, (int32_t)imm);

    case OPC_TEQ:   /* rs == rs */
    case OPC_TEQI:  /* r0 == 0  */
    case OPC_TGE:   /* rs >= rs */
    case OPC_TGEI:  /* r0 >= 0  */
    case OPC_TGEU:  /* rs >= rs unsigned */
    case OPC_TGEIU: /* r0 >= 0  unsigned */
        generate_exception_end(ctx, EXCP_TRAP);
    case OPC_TLT:   /* rs < rs  */
    case OPC_TLTI:  /* r0 < 0   */
    case OPC_TLTU:  /* rs < rs unsigned */
    case OPC_TLTIU: /* r0 < 0  unsigned */
    case OPC_TNE:   /* rs != rs */
    case OPC_TNEI:  /* r0 != 0  */
        /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
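        /*
         * Each brcond above tests the inverse of its trap condition and
         * jumps to l1, past generate_exception, when the trap should not be
         * taken (e.g. TEQ branches on TCG_COND_NE), so the trap exception
         * is raised only when the architectural condition actually holds.
         */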
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {

#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
    if (ctx->singlestep_enabled) {
        save_cpu_state(ctx, 0);
        gen_helper_raise_exception_debug(cpu_env);
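/*
 * Direct translation-block chaining: when the branch target lies on the same
 * guest page as the current TB and we are not single-stepping, the block is
 * exited with tcg_gen_exit_tb((uintptr_t)tb + n), where n selects one of the
 * two goto_tb slots, which lets the execution loop patch in a direct jump to
 * the target block.  Otherwise the code falls back to an indirect exit, and
 * under single-step a debug exception is raised after every branch.
 */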
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
    /* Compare two registers */
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    btgt = ctx->pc + insn_bytes + offset;

    /* Compare to zero */
    gen_load_gpr(t0, rs);
    btgt = ctx->pc + insn_bytes + offset;

#if defined(TARGET_MIPS64)
    tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
    tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
    btgt = ctx->pc + insn_bytes + offset;

    /* Jump to immediate */
    btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;

    /* Jump to register */
    if (offset != 0 && offset != 16) {
        /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
           others are reserved. */
        MIPS_INVAL("jump hint");
        generate_exception_end(ctx, EXCP_RI);
    gen_load_gpr(btarget, rs);

    MIPS_INVAL("branch/jump");
    generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* No condition to be computed */

        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */

            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */

            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */

        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking. */

            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */

        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */

            ctx->hflags |= MIPS_HFLAG_B;

            ctx->hflags |= MIPS_HFLAG_BX;

            ctx->hflags |= MIPS_HFLAG_B;

            ctx->hflags |= MIPS_HFLAG_BR;

            ctx->hflags |= MIPS_HFLAG_BR;

            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        ctx->hflags |= MIPS_HFLAG_BC;
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        ctx->hflags |= MIPS_HFLAG_BL;

        MIPS_INVAL("conditional branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
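    /*
     * For the linking forms, the return address written to GPR[blink] is
     * the address of the instruction following the delay slot
     * (pc + insn_bytes + delayslot_size); the low bit is set when the
     * branch executes in MIPS16/microMIPS mode (MIPS_HFLAG_M16) so that a
     * later jump through the link register returns to the correct ISA mode.
     */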
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);

    if (lsb + msb > 31) {

    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
    tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)
    if (lsb + msb > 63) {

    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);

    gen_load_gpr(t0, rt);
    tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
    tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)
    gen_load_gpr(t0, rt);
    tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);

    MIPS_INVAL("bitops");
    generate_exception_end(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
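/*
 * Summary of the two families handled above: the EXT-style opcodes extract
 * a bitfield by shifting the source right by lsb and masking it to msb + 1
 * bits (sign-extending the 32-bit forms), while the INS-style opcodes load
 * the current destination and use tcg_gen_deposit_tl to overwrite bits
 * lsb..msb with the low bits of the source; either way the result ends up
 * in GPR[rt].
 */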
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)

    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
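/*
 * The swap sequences above use the standard shift-and-mask idiom: one copy
 * of the value is shifted right by 8 and masked with 0x00FF00FF... to pick
 * out the odd bytes, the other is shifted left by 8 and masked with the
 * complement for the even bytes, and the two halves are ORed back together;
 * the final 64-bit variant additionally swaps 16-bit halves and then the
 * two 32-bit words to reverse the whole doubleword.
 */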
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)
    tcg_gen_mov_tl(cpu_gpr[rd], t0);

    TCGv t1 = tcg_temp_new();
    gen_load_gpr(t1, rs);

        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_concat_tl_i64(t2, t1, t0);
        tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
        gen_move_low32(cpu_gpr[rd], t2);
        tcg_temp_free_i64(t2);

#if defined(TARGET_MIPS64)
    tcg_gen_shli_tl(t0, t0, 8 * bp);
    tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
    tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
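/*
 * In the ALIGN path above, rt and rs are concatenated into a single 64-bit
 * value (rt in the upper half, rs in the lower) and shifted right by
 * 8 * (4 - bp) bytes; the low 32 bits of the result, a word whose bytes come
 * partly from each source register, are written to GPR[rd].  The 64-bit
 * DALIGN variant gets the same effect from shifting rt left by 8 * bp,
 * shifting rs right by 8 * (8 - bp) and ORing the two.
 */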
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    gen_helper_bitswap(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)
    gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
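/*
 * Both MTHC0 helpers above follow the same read-modify-write pattern: the
 * current 64-bit CP0 field is loaded from env, the bits supplied in arg are
 * merged in above the existing low half (deposited at bit 30 for EntryLo on
 * MIPS64, or placed into bits 63:32 via tcg_gen_concat32_i64 otherwise), and
 * the combined value is stored back at the same offset.
 */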
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
#define CP0_CHECK(c) \
        goto cp0_unimplemented; \

static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

    gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
    goto cp0_unimplemented;

    gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
    goto cp0_unimplemented;

    gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                     ctx->CP0_LLAddr_shift);

    CP0_CHECK(ctx->mrp);
    gen_helper_mfhc0_maar(arg, cpu_env);
    goto cp0_unimplemented;

    gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
    goto cp0_unimplemented;

    goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

    tcg_gen_andi_tl(arg, arg, mask);
    gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
    goto cp0_unimplemented;

    tcg_gen_andi_tl(arg, arg, mask);
    gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
    goto cp0_unimplemented;

    /* LLAddr is read-only (the only exception is bit 0 if LLB is
       supported); the CP0_LLAddr_rw_bitmask does not seem to be
       relevant for modern MIPS cores supporting MTHC0, therefore
       treating MTHC0 to LLAddr as NOP. */

    CP0_CHECK(ctx->mrp);
    gen_helper_mthc0_maar(cpu_env, arg);
    goto cp0_unimplemented;

    tcg_gen_andi_tl(arg, arg, mask);
    gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
    goto cp0_unimplemented;

    goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);

static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
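/*
 * gen_mfc0_unimplemented encodes the revision-dependent read value of an
 * unimplemented CP0 register: with Release 6 such reads return 0, while the
 * pre-R6 behaviour modelled here is to return all ones.
 */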
4945 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4947 const char *rn
= "invalid";
4950 check_insn(ctx
, ISA_MIPS32
);
4956 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4960 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4961 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4965 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4966 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4970 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4971 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4976 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4980 goto cp0_unimplemented
;
4986 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4987 gen_helper_mfc0_random(arg
, cpu_env
);
4991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4992 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4997 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5007 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5012 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5016 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5017 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5018 rn
= "VPEScheFBack";
5021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5022 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5026 goto cp0_unimplemented
;
5033 TCGv_i64 tmp
= tcg_temp_new_i64();
5034 tcg_gen_ld_i64(tmp
, cpu_env
,
5035 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5036 #if defined(TARGET_MIPS64)
5038 /* Move RI/XI fields to bits 31:30 */
5039 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5040 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5043 gen_move_low32(arg
, tmp
);
5044 tcg_temp_free_i64(tmp
);
5049 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5050 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5054 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5055 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5059 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5060 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5064 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5065 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5069 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5070 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5074 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5075 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5080 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5084 goto cp0_unimplemented
;
5091 TCGv_i64 tmp
= tcg_temp_new_i64();
5092 tcg_gen_ld_i64(tmp
, cpu_env
,
5093 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5094 #if defined(TARGET_MIPS64)
5096 /* Move RI/XI fields to bits 31:30 */
5097 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5098 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5101 gen_move_low32(arg
, tmp
);
5102 tcg_temp_free_i64(tmp
);
5108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5109 rn
= "GlobalNumber";
5112 goto cp0_unimplemented
;
5118 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5119 tcg_gen_ext32s_tl(arg
, arg
);
5123 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5124 rn
= "ContextConfig";
5125 goto cp0_unimplemented
;
5128 CP0_CHECK(ctx
->ulri
);
5129 tcg_gen_ld32s_tl(arg
, cpu_env
,
5130 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5134 goto cp0_unimplemented
;
5140 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5144 check_insn(ctx
, ISA_MIPS32R2
);
5145 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5149 goto cp0_unimplemented
;
5155 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5159 check_insn(ctx
, ISA_MIPS32R2
);
5160 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5164 check_insn(ctx
, ISA_MIPS32R2
);
5165 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5169 check_insn(ctx
, ISA_MIPS32R2
);
5170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5174 check_insn(ctx
, ISA_MIPS32R2
);
5175 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5179 check_insn(ctx
, ISA_MIPS32R2
);
5180 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5184 goto cp0_unimplemented
;
5190 check_insn(ctx
, ISA_MIPS32R2
);
5191 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5195 goto cp0_unimplemented
;
5201 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5202 tcg_gen_ext32s_tl(arg
, arg
);
5207 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5212 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5216 goto cp0_unimplemented
;
5222 /* Mark as an IO operation because we read the time. */
5223 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5226 gen_helper_mfc0_count(arg
, cpu_env
);
5227 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5230 /* Break the TB to be able to take timer interrupts immediately
5231 after reading count. */
5232 ctx
->bstate
= BS_STOP
;
5235 /* 6,7 are implementation dependent */
5237 goto cp0_unimplemented
;
5243 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5244 tcg_gen_ext32s_tl(arg
, arg
);
5248 goto cp0_unimplemented
;
5254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5257 /* 6,7 are implementation dependent */
5259 goto cp0_unimplemented
;
5265 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5269 check_insn(ctx
, ISA_MIPS32R2
);
5270 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5274 check_insn(ctx
, ISA_MIPS32R2
);
5275 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5279 check_insn(ctx
, ISA_MIPS32R2
);
5280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5284 goto cp0_unimplemented
;
5290 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5294 goto cp0_unimplemented
;
5300 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5301 tcg_gen_ext32s_tl(arg
, arg
);
5305 goto cp0_unimplemented
;
5311 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5315 check_insn(ctx
, ISA_MIPS32R2
);
5316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5320 check_insn(ctx
, ISA_MIPS32R2
);
5321 CP0_CHECK(ctx
->cmgcr
);
5322 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5323 tcg_gen_ext32s_tl(arg
, arg
);
5327 goto cp0_unimplemented
;
5333 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5337 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5341 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5353 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5356 /* 6,7 are implementation dependent */
5358 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5362 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5366 goto cp0_unimplemented
;
5372 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5376 CP0_CHECK(ctx
->mrp
);
5377 gen_helper_mfc0_maar(arg
, cpu_env
);
5381 CP0_CHECK(ctx
->mrp
);
5382 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5386 goto cp0_unimplemented
;
5392 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5396 goto cp0_unimplemented
;
5402 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5406 goto cp0_unimplemented
;
5412 #if defined(TARGET_MIPS64)
5413 check_insn(ctx
, ISA_MIPS3
);
5414 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5415 tcg_gen_ext32s_tl(arg
, arg
);
5420 goto cp0_unimplemented
;
5424 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5425 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5428 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5432 goto cp0_unimplemented
;
5436 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5437 rn
= "'Diagnostic"; /* implementation dependent */
5442 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5446 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5447 rn
= "TraceControl";
5450 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5451 rn
= "TraceControl2";
5454 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5455 rn
= "UserTraceData";
5458 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5462 goto cp0_unimplemented
;
5469 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5470 tcg_gen_ext32s_tl(arg
, arg
);
5474 goto cp0_unimplemented
;
5480 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5481 rn
= "Performance0";
5484 // gen_helper_mfc0_performance1(arg);
5485 rn
= "Performance1";
5488 // gen_helper_mfc0_performance2(arg);
5489 rn
= "Performance2";
5492 // gen_helper_mfc0_performance3(arg);
5493 rn
= "Performance3";
5496 // gen_helper_mfc0_performance4(arg);
5497 rn
= "Performance4";
5500 // gen_helper_mfc0_performance5(arg);
5501 rn
= "Performance5";
5504 // gen_helper_mfc0_performance6(arg);
5505 rn
= "Performance6";
5508 // gen_helper_mfc0_performance7(arg);
5509 rn
= "Performance7";
5512 goto cp0_unimplemented
;
5518 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5522 goto cp0_unimplemented
;
5528 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5532 goto cp0_unimplemented
;
5542 TCGv_i64 tmp
= tcg_temp_new_i64();
5543 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5544 gen_move_low32(arg
, tmp
);
5545 tcg_temp_free_i64(tmp
);
5553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5557 goto cp0_unimplemented
;
5566 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5573 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5577 goto cp0_unimplemented
;
5583 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5584 tcg_gen_ext32s_tl(arg
, arg
);
5588 goto cp0_unimplemented
;
5595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5599 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5600 tcg_gen_ld_tl(arg
, cpu_env
,
5601 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5602 tcg_gen_ext32s_tl(arg
, arg
);
5606 goto cp0_unimplemented
;
5610 goto cp0_unimplemented
;
5612 (void)rn
; /* avoid a compiler warning */
5613 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5617 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5618 gen_mfc0_unimplemented(ctx
, arg
);
5621 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5623 const char *rn
= "invalid";
5626 check_insn(ctx
, ISA_MIPS32
);
5628 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5636 gen_helper_mtc0_index(cpu_env
, arg
);
5640 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5641 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5645 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5650 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5660 goto cp0_unimplemented
;
5670 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5671 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5675 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5676 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5680 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5681 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5685 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5686 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5690 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5691 tcg_gen_st_tl(arg
, cpu_env
,
5692 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5696 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5697 tcg_gen_st_tl(arg
, cpu_env
,
5698 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5699 rn
= "VPEScheFBack";
5702 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5703 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5707 goto cp0_unimplemented
;
5713 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5717 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5718 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5722 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5723 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5727 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5728 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5732 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5733 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5737 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5738 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5742 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5743 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5747 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5748 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5752 goto cp0_unimplemented
;
5758 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5764 rn
= "GlobalNumber";
5767 goto cp0_unimplemented
;
5773 gen_helper_mtc0_context(cpu_env
, arg
);
5777 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5778 rn
= "ContextConfig";
5779 goto cp0_unimplemented
;
5782 CP0_CHECK(ctx
->ulri
);
5783 tcg_gen_st_tl(arg
, cpu_env
,
5784 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5788 goto cp0_unimplemented
;
5794 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5798 check_insn(ctx
, ISA_MIPS32R2
);
5799 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5801 ctx
->bstate
= BS_STOP
;
5804 goto cp0_unimplemented
;
5810 gen_helper_mtc0_wired(cpu_env
, arg
);
5814 check_insn(ctx
, ISA_MIPS32R2
);
5815 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5819 check_insn(ctx
, ISA_MIPS32R2
);
5820 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5824 check_insn(ctx
, ISA_MIPS32R2
);
5825 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5829 check_insn(ctx
, ISA_MIPS32R2
);
5830 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5834 check_insn(ctx
, ISA_MIPS32R2
);
5835 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5839 goto cp0_unimplemented
;
5845 check_insn(ctx
, ISA_MIPS32R2
);
5846 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5847 ctx
->bstate
= BS_STOP
;
5851 goto cp0_unimplemented
;
5869 goto cp0_unimplemented
;
5875 gen_helper_mtc0_count(cpu_env
, arg
);
5878 /* 6,7 are implementation dependent */
5880 goto cp0_unimplemented
;
5886 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5890 goto cp0_unimplemented
;
5896 gen_helper_mtc0_compare(cpu_env
, arg
);
5899 /* 6,7 are implementation dependent */
5901 goto cp0_unimplemented
;
5907 save_cpu_state(ctx
, 1);
5908 gen_helper_mtc0_status(cpu_env
, arg
);
5909 /* BS_STOP isn't good enough here, hflags may have changed. */
5910 gen_save_pc(ctx
->pc
+ 4);
5911 ctx
->bstate
= BS_EXCP
;
5915 check_insn(ctx
, ISA_MIPS32R2
);
5916 gen_helper_mtc0_intctl(cpu_env
, arg
);
5917 /* Stop translation as we may have switched the execution mode */
5918 ctx
->bstate
= BS_STOP
;
5922 check_insn(ctx
, ISA_MIPS32R2
);
5923 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5924 /* Stop translation as we may have switched the execution mode */
5925 ctx
->bstate
= BS_STOP
;
5929 check_insn(ctx
, ISA_MIPS32R2
);
5930 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5931 /* Stop translation as we may have switched the execution mode */
5932 ctx
->bstate
= BS_STOP
;
5936 goto cp0_unimplemented
;
5942 save_cpu_state(ctx
, 1);
5943 gen_helper_mtc0_cause(cpu_env
, arg
);
5947 goto cp0_unimplemented
;
5953 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5957 goto cp0_unimplemented
;
5967 check_insn(ctx
, ISA_MIPS32R2
);
5968 gen_helper_mtc0_ebase(cpu_env
, arg
);
5972 goto cp0_unimplemented
;
5978 gen_helper_mtc0_config0(cpu_env
, arg
);
5980 /* Stop translation as we may have switched the execution mode */
5981 ctx
->bstate
= BS_STOP
;
5984 /* ignored, read only */
5988 gen_helper_mtc0_config2(cpu_env
, arg
);
5990 /* Stop translation as we may have switched the execution mode */
5991 ctx
->bstate
= BS_STOP
;
5994 gen_helper_mtc0_config3(cpu_env
, arg
);
5996 /* Stop translation as we may have switched the execution mode */
5997 ctx
->bstate
= BS_STOP
;
6000 gen_helper_mtc0_config4(cpu_env
, arg
);
6002 ctx
->bstate
= BS_STOP
;
6005 gen_helper_mtc0_config5(cpu_env
, arg
);
6007 /* Stop translation as we may have switched the execution mode */
6008 ctx
->bstate
= BS_STOP
;
6010 /* 6,7 are implementation dependent */
6020 rn
= "Invalid config selector";
6021 goto cp0_unimplemented
;
6027 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6031 CP0_CHECK(ctx
->mrp
);
6032 gen_helper_mtc0_maar(cpu_env
, arg
);
6036 CP0_CHECK(ctx
->mrp
);
6037 gen_helper_mtc0_maari(cpu_env
, arg
);
6041 goto cp0_unimplemented
;
6047 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6051 goto cp0_unimplemented
;
6057 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6061 goto cp0_unimplemented
;
6067 #if defined(TARGET_MIPS64)
6068 check_insn(ctx
, ISA_MIPS3
);
6069 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6074 goto cp0_unimplemented
;
6078 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6079 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6082 gen_helper_mtc0_framemask(cpu_env
, arg
);
6086 goto cp0_unimplemented
;
6091 rn
= "Diagnostic"; /* implementation dependent */
6096 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6097 /* BS_STOP isn't good enough here, hflags may have changed. */
6098 gen_save_pc(ctx
->pc
+ 4);
6099 ctx
->bstate
= BS_EXCP
;
6103 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6104 rn
= "TraceControl";
6105 /* Stop translation as we may have switched the execution mode */
6106 ctx
->bstate
= BS_STOP
;
6109 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6110 rn
= "TraceControl2";
6111 /* Stop translation as we may have switched the execution mode */
6112 ctx
->bstate
= BS_STOP
;
6115 /* Stop translation as we may have switched the execution mode */
6116 ctx
->bstate
= BS_STOP
;
6117 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6118 rn
= "UserTraceData";
6119 /* Stop translation as we may have switched the execution mode */
6120 ctx
->bstate
= BS_STOP
;
6123 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6124 /* Stop translation as we may have switched the execution mode */
6125 ctx
->bstate
= BS_STOP
;
6129 goto cp0_unimplemented
;
6136 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6140 goto cp0_unimplemented
;
6146 gen_helper_mtc0_performance0(cpu_env
, arg
);
6147 rn
= "Performance0";
6150 // gen_helper_mtc0_performance1(arg);
6151 rn
= "Performance1";
6154 // gen_helper_mtc0_performance2(arg);
6155 rn
= "Performance2";
6158 // gen_helper_mtc0_performance3(arg);
6159 rn
= "Performance3";
6162 // gen_helper_mtc0_performance4(arg);
6163 rn
= "Performance4";
6166 // gen_helper_mtc0_performance5(arg);
6167 rn
= "Performance5";
6170 // gen_helper_mtc0_performance6(arg);
6171 rn
= "Performance6";
6174 // gen_helper_mtc0_performance7(arg);
6175 rn
= "Performance7";
6178 goto cp0_unimplemented
;
6184 gen_helper_mtc0_errctl(cpu_env
, arg
);
6185 ctx
->bstate
= BS_STOP
;
6189 goto cp0_unimplemented
;
6199 goto cp0_unimplemented
;
6208 gen_helper_mtc0_taglo(cpu_env
, arg
);
6215 gen_helper_mtc0_datalo(cpu_env
, arg
);
6219 goto cp0_unimplemented
;
6228 gen_helper_mtc0_taghi(cpu_env
, arg
);
6235 gen_helper_mtc0_datahi(cpu_env
, arg
);
6240 goto cp0_unimplemented
;
6246 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6250 goto cp0_unimplemented
;
6257 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6261 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6262 tcg_gen_st_tl(arg
, cpu_env
,
6263 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6267 goto cp0_unimplemented
;
6269 /* Stop translation as we may have switched the execution mode */
6270 ctx
->bstate
= BS_STOP
;
6273 goto cp0_unimplemented
;
6275 (void)rn
; /* avoid a compiler warning */
6276 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6277 /* For simplicity assume that all writes can cause interrupts. */
6278 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6280 ctx
->bstate
= BS_STOP
;
6285 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6288 #if defined(TARGET_MIPS64)
6289 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6291 const char *rn
= "invalid";
6294 check_insn(ctx
, ISA_MIPS64
);
6300 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6304 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6305 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6309 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6310 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6314 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6315 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6324 goto cp0_unimplemented
;
6330 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6331 gen_helper_mfc0_random(arg
, cpu_env
);
6335 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6340 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6341 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6345 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6346 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6350 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6351 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6355 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6356 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6360 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6361 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6362 rn
= "VPEScheFBack";
6365 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6366 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6370 goto cp0_unimplemented
;
6376 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6380 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6381 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6385 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6386 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6391 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6395 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6396 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6400 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6401 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6405 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6406 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6410 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6411 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6415 goto cp0_unimplemented
;
6421 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6426 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6427 rn
= "GlobalNumber";
6430 goto cp0_unimplemented
;
6436 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6440 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6441 rn
= "ContextConfig";
6442 goto cp0_unimplemented
;
6445 CP0_CHECK(ctx
->ulri
);
6446 tcg_gen_ld_tl(arg
, cpu_env
,
6447 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6451 goto cp0_unimplemented
;
6457 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6461 check_insn(ctx
, ISA_MIPS32R2
);
6462 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6466 goto cp0_unimplemented
;
6472 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6476 check_insn(ctx
, ISA_MIPS32R2
);
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6481 check_insn(ctx
, ISA_MIPS32R2
);
6482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6486 check_insn(ctx
, ISA_MIPS32R2
);
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6491 check_insn(ctx
, ISA_MIPS32R2
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6496 check_insn(ctx
, ISA_MIPS32R2
);
6497 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6501 goto cp0_unimplemented
;
6507 check_insn(ctx
, ISA_MIPS32R2
);
6508 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6512 goto cp0_unimplemented
;
6518 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6528 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6532 goto cp0_unimplemented
;
6538 /* Mark as an IO operation because we read the time. */
6539 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6542 gen_helper_mfc0_count(arg
, cpu_env
);
6543 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6546 /* Break the TB to be able to take timer interrupts immediately
6547 after reading count. */
6548 ctx
->bstate
= BS_STOP
;
6551 /* 6,7 are implementation dependent */
6553 goto cp0_unimplemented
;
6559 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6563 goto cp0_unimplemented
;
6569 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6572 /* 6,7 are implementation dependent */
6574 goto cp0_unimplemented
;
6580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6584 check_insn(ctx
, ISA_MIPS32R2
);
6585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6589 check_insn(ctx
, ISA_MIPS32R2
);
6590 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6594 check_insn(ctx
, ISA_MIPS32R2
);
6595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6599 goto cp0_unimplemented
;
6605 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6609 goto cp0_unimplemented
;
6615 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6619 goto cp0_unimplemented
;
6625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6629 check_insn(ctx
, ISA_MIPS32R2
);
6630 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6634 check_insn(ctx
, ISA_MIPS32R2
);
6635 CP0_CHECK(ctx
->cmgcr
);
6636 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6640 goto cp0_unimplemented
;
6646 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6650 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6654 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6658 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6666 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6669 /* 6,7 are implementation dependent */
6671 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6675 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6679 goto cp0_unimplemented
;
6685 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6689 CP0_CHECK(ctx
->mrp
);
6690 gen_helper_dmfc0_maar(arg
, cpu_env
);
6694 CP0_CHECK(ctx
->mrp
);
6695 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6699 goto cp0_unimplemented
;
6705 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6709 goto cp0_unimplemented
;
6715 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6719 goto cp0_unimplemented
;
6725 check_insn(ctx
, ISA_MIPS3
);
6726 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6730 goto cp0_unimplemented
;
6734 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6735 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6742 goto cp0_unimplemented
;
6746 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6747 rn
= "'Diagnostic"; /* implementation dependent */
6752 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6756 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6757 rn
= "TraceControl";
6760 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6761 rn
= "TraceControl2";
6764 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6765 rn
= "UserTraceData";
6768 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6772 goto cp0_unimplemented
;
6779 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6783 goto cp0_unimplemented
;
6789 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6790 rn
= "Performance0";
6793 // gen_helper_dmfc0_performance1(arg);
6794 rn
= "Performance1";
6797 // gen_helper_dmfc0_performance2(arg);
6798 rn
= "Performance2";
6801 // gen_helper_dmfc0_performance3(arg);
6802 rn
= "Performance3";
6805 // gen_helper_dmfc0_performance4(arg);
6806 rn
= "Performance4";
6809 // gen_helper_dmfc0_performance5(arg);
6810 rn
= "Performance5";
6813 // gen_helper_dmfc0_performance6(arg);
6814 rn
= "Performance6";
6817 // gen_helper_dmfc0_performance7(arg);
6818 rn
= "Performance7";
6821 goto cp0_unimplemented
;
6827 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6831 goto cp0_unimplemented
;
6838 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6842 goto cp0_unimplemented
;
6851 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6858 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6862 goto cp0_unimplemented
;
6871 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6882 goto cp0_unimplemented
;
6888 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6892 goto cp0_unimplemented
;
6899 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6903 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6904 tcg_gen_ld_tl(arg
, cpu_env
,
6905 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6909 goto cp0_unimplemented
;
6913 goto cp0_unimplemented
;
6915 (void)rn
; /* avoid a compiler warning */
6916 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6920 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6921 gen_mfc0_unimplemented(ctx
, arg
);
6924 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6926 const char *rn
= "invalid";
6929 check_insn(ctx
, ISA_MIPS64
);
6931 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6939 gen_helper_mtc0_index(cpu_env
, arg
);
6943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6944 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6948 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6963 goto cp0_unimplemented
;
6973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6974 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6979 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6984 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6988 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6989 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6993 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6994 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6998 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6999 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7000 rn
= "VPEScheFBack";
7003 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7004 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7008 goto cp0_unimplemented
;
7014 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7018 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7019 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7024 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7028 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7029 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7033 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7034 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7038 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7039 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7043 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7044 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7049 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7053 goto cp0_unimplemented
;
7059 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7065 rn
= "GlobalNumber";
7068 goto cp0_unimplemented
;
7074 gen_helper_mtc0_context(cpu_env
, arg
);
7078 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7079 rn
= "ContextConfig";
7080 goto cp0_unimplemented
;
7083 CP0_CHECK(ctx
->ulri
);
7084 tcg_gen_st_tl(arg
, cpu_env
,
7085 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7089 goto cp0_unimplemented
;
7095 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7099 check_insn(ctx
, ISA_MIPS32R2
);
7100 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7104 goto cp0_unimplemented
;
7110 gen_helper_mtc0_wired(cpu_env
, arg
);
7114 check_insn(ctx
, ISA_MIPS32R2
);
7115 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7119 check_insn(ctx
, ISA_MIPS32R2
);
7120 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7124 check_insn(ctx
, ISA_MIPS32R2
);
7125 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7129 check_insn(ctx
, ISA_MIPS32R2
);
7130 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7134 check_insn(ctx
, ISA_MIPS32R2
);
7135 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7139 goto cp0_unimplemented
;
7145 check_insn(ctx
, ISA_MIPS32R2
);
7146 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7147 ctx
->bstate
= BS_STOP
;
7151 goto cp0_unimplemented
;
7169 goto cp0_unimplemented
;
7175 gen_helper_mtc0_count(cpu_env
, arg
);
7178 /* 6,7 are implementation dependent */
7180 goto cp0_unimplemented
;
7182 /* Stop translation as we may have switched the execution mode */
7183 ctx
->bstate
= BS_STOP
;
7188 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7192 goto cp0_unimplemented
;
7198 gen_helper_mtc0_compare(cpu_env
, arg
);
7201 /* 6,7 are implementation dependent */
7203 goto cp0_unimplemented
;
7205 /* Stop translation as we may have switched the execution mode */
7206 ctx
->bstate
= BS_STOP
;
7211 save_cpu_state(ctx
, 1);
7212 gen_helper_mtc0_status(cpu_env
, arg
);
7213 /* BS_STOP isn't good enough here, hflags may have changed. */
7214 gen_save_pc(ctx
->pc
+ 4);
7215 ctx
->bstate
= BS_EXCP
;
7219 check_insn(ctx
, ISA_MIPS32R2
);
7220 gen_helper_mtc0_intctl(cpu_env
, arg
);
7221 /* Stop translation as we may have switched the execution mode */
7222 ctx
->bstate
= BS_STOP
;
7226 check_insn(ctx
, ISA_MIPS32R2
);
7227 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7228 /* Stop translation as we may have switched the execution mode */
7229 ctx
->bstate
= BS_STOP
;
7233 check_insn(ctx
, ISA_MIPS32R2
);
7234 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7235 /* Stop translation as we may have switched the execution mode */
7236 ctx
->bstate
= BS_STOP
;
7240 goto cp0_unimplemented
;
7246 save_cpu_state(ctx
, 1);
7247 /* Mark as an IO operation because we may trigger a software interrupt. */
7249 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7252 gen_helper_mtc0_cause(cpu_env
, arg
);
7253 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7256 /* Stop translation as we may have triggered an interrupt */
7257 ctx
->bstate
= BS_STOP
;
7261 goto cp0_unimplemented
;
7267 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7271 goto cp0_unimplemented
;
7281 check_insn(ctx
, ISA_MIPS32R2
);
7282 gen_helper_mtc0_ebase(cpu_env
, arg
);
7286 goto cp0_unimplemented
;
7292 gen_helper_mtc0_config0(cpu_env
, arg
);
7294 /* Stop translation as we may have switched the execution mode */
7295 ctx
->bstate
= BS_STOP
;
7298 /* ignored, read only */
7302 gen_helper_mtc0_config2(cpu_env
, arg
);
7304 /* Stop translation as we may have switched the execution mode */
7305 ctx
->bstate
= BS_STOP
;
7308 gen_helper_mtc0_config3(cpu_env
, arg
);
7310 /* Stop translation as we may have switched the execution mode */
7311 ctx
->bstate
= BS_STOP
;
7314 /* currently ignored */
7318 gen_helper_mtc0_config5(cpu_env
, arg
);
7320 /* Stop translation as we may have switched the execution mode */
7321 ctx
->bstate
= BS_STOP
;
7323 /* 6,7 are implementation dependent */
7325 rn
= "Invalid config selector";
7326 goto cp0_unimplemented
;
7332 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7336 CP0_CHECK(ctx
->mrp
);
7337 gen_helper_mtc0_maar(cpu_env
, arg
);
7341 CP0_CHECK(ctx
->mrp
);
7342 gen_helper_mtc0_maari(cpu_env
, arg
);
7346 goto cp0_unimplemented
;
7352 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7356 goto cp0_unimplemented
;
7362 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7366 goto cp0_unimplemented
;
7372 check_insn(ctx
, ISA_MIPS3
);
7373 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7377 goto cp0_unimplemented
;
7381 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7382 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7385 gen_helper_mtc0_framemask(cpu_env
, arg
);
7389 goto cp0_unimplemented
;
7394 rn
= "Diagnostic"; /* implementation dependent */
7399 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7400 /* BS_STOP isn't good enough here, hflags may have changed. */
7401 gen_save_pc(ctx
->pc
+ 4);
7402 ctx
->bstate
= BS_EXCP
;
7406 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7407 /* Stop translation as we may have switched the execution mode */
7408 ctx
->bstate
= BS_STOP
;
7409 rn
= "TraceControl";
7412 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7413 /* Stop translation as we may have switched the execution mode */
7414 ctx
->bstate
= BS_STOP
;
7415 rn
= "TraceControl2";
7418 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7419 /* Stop translation as we may have switched the execution mode */
7420 ctx
->bstate
= BS_STOP
;
7421 rn
= "UserTraceData";
7424 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7425 /* Stop translation as we may have switched the execution mode */
7426 ctx
->bstate
= BS_STOP
;
7430 goto cp0_unimplemented
;
7437 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7441 goto cp0_unimplemented
;
7447 gen_helper_mtc0_performance0(cpu_env
, arg
);
7448 rn
= "Performance0";
7451 // gen_helper_mtc0_performance1(cpu_env, arg);
7452 rn
= "Performance1";
7455 // gen_helper_mtc0_performance2(cpu_env, arg);
7456 rn
= "Performance2";
7459 // gen_helper_mtc0_performance3(cpu_env, arg);
7460 rn
= "Performance3";
7463 // gen_helper_mtc0_performance4(cpu_env, arg);
7464 rn
= "Performance4";
7467 // gen_helper_mtc0_performance5(cpu_env, arg);
7468 rn
= "Performance5";
7471 // gen_helper_mtc0_performance6(cpu_env, arg);
7472 rn
= "Performance6";
7475 // gen_helper_mtc0_performance7(cpu_env, arg);
7476 rn
= "Performance7";
7479 goto cp0_unimplemented
;
7485 gen_helper_mtc0_errctl(cpu_env
, arg
);
7486 ctx
->bstate
= BS_STOP
;
7490 goto cp0_unimplemented
;
7500 goto cp0_unimplemented
;
7509 gen_helper_mtc0_taglo(cpu_env
, arg
);
7516 gen_helper_mtc0_datalo(cpu_env
, arg
);
7520 goto cp0_unimplemented
;
7529 gen_helper_mtc0_taghi(cpu_env
, arg
);
7536 gen_helper_mtc0_datahi(cpu_env
, arg
);
7541 goto cp0_unimplemented
;
7547 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7551 goto cp0_unimplemented
;
7558 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7562 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7563 tcg_gen_st_tl(arg
, cpu_env
,
7564 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7568 goto cp0_unimplemented
;
7570 /* Stop translation as we may have switched the execution mode */
7571 ctx
->bstate
= BS_STOP
;
7574 goto cp0_unimplemented
;
7576 (void)rn
; /* avoid a compiler warning */
7577 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7578 /* For simplicity assume that all writes can cause interrupts. */
7579 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7581 ctx
->bstate
= BS_STOP
;
7586 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7588 #endif /* TARGET_MIPS64 */
7590 static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
7591 int u, int sel, int h)
7593 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
7594 TCGv t0 = tcg_temp_local_new();
7596 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
7597 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
7598 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
7599 tcg_gen_movi_tl(t0, -1);
7600 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
7601 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
7602 tcg_gen_movi_tl(t0, -1);
7608 gen_helper_mftc0_vpecontrol(t0, cpu_env);
7611 gen_helper_mftc0_vpeconf0(t0, cpu_env);
7621 gen_helper_mftc0_tcstatus(t0, cpu_env);
7624 gen_helper_mftc0_tcbind(t0, cpu_env);
7627 gen_helper_mftc0_tcrestart(t0, cpu_env);
7630 gen_helper_mftc0_tchalt(t0, cpu_env);
7633 gen_helper_mftc0_tccontext(t0, cpu_env);
7636 gen_helper_mftc0_tcschedule(t0, cpu_env);
7639 gen_helper_mftc0_tcschefback(t0, cpu_env);
7642 gen_mfc0(ctx, t0, rt, sel);
7649 gen_helper_mftc0_entryhi(t0, cpu_env);
7652 gen_mfc0(ctx, t0, rt, sel);
7658 gen_helper_mftc0_status(t0, cpu_env);
7661 gen_mfc0(ctx, t0, rt, sel);
7667 gen_helper_mftc0_cause(t0, cpu_env);
7677 gen_helper_mftc0_epc(t0, cpu_env);
7687 gen_helper_mftc0_ebase(t0, cpu_env);
7697 gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
7707 gen_helper_mftc0_debug(t0, cpu_env);
7710 gen_mfc0(ctx, t0, rt, sel);
7715 gen_mfc0(ctx, t0, rt, sel);
7717 } else switch (sel) {
7718 /* GPR registers. */
7720 gen_helper_1e0i(mftgpr, t0, rt);
7722 /* Auxiliary CPU registers */
7726 gen_helper_1e0i(mftlo, t0, 0);
7729 gen_helper_1e0i(mfthi, t0, 0);
7732 gen_helper_1e0i(mftacx, t0, 0);
7735 gen_helper_1e0i(mftlo, t0, 1);
7738 gen_helper_1e0i(mfthi, t0, 1);
7741 gen_helper_1e0i(mftacx, t0, 1);
7744 gen_helper_1e0i(mftlo, t0, 2);
7747 gen_helper_1e0i(mfthi, t0, 2);
7750 gen_helper_1e0i(mftacx, t0, 2);
7753 gen_helper_1e0i(mftlo, t0, 3);
7756 gen_helper_1e0i(mfthi, t0, 3);
7759 gen_helper_1e0i(mftacx, t0, 3);
7762 gen_helper_mftdsp(t0, cpu_env);
7768 /* Floating point (COP1). */
7770 /* XXX: For now we support only a single FPU context. */
7772 TCGv_i32 fp0 = tcg_temp_new_i32();
7774 gen_load_fpr32(ctx, fp0, rt);
7775 tcg_gen_ext_i32_tl(t0, fp0);
7776 tcg_temp_free_i32(fp0);
7778 TCGv_i32 fp0 = tcg_temp_new_i32();
7780 gen_load_fpr32h(ctx, fp0, rt);
7781 tcg_gen_ext_i32_tl(t0, fp0);
7782 tcg_temp_free_i32(fp0);
7786 /* XXX: For now we support only a single FPU context. */
7787 gen_helper_1e0i(cfc1, t0, rt);
7789 /* COP2: Not implemented. */
7796 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
7797 gen_store_gpr(t0, rd);
7803 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
7804 generate_exception_end(ctx, EXCP_RI);
7807 static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
7808 int u, int sel, int h)
7810 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
7811 TCGv t0 = tcg_temp_local_new();
7813 gen_load_gpr(t0, rt);
7814 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
7815 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
7816 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
7818 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
7819 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
7826 gen_helper_mttc0_vpecontrol(cpu_env, t0);
7829 gen_helper_mttc0_vpeconf0(cpu_env, t0);
7839 gen_helper_mttc0_tcstatus(cpu_env, t0);
7842 gen_helper_mttc0_tcbind(cpu_env, t0);
7845 gen_helper_mttc0_tcrestart(cpu_env, t0);
7848 gen_helper_mttc0_tchalt(cpu_env, t0);
7851 gen_helper_mttc0_tccontext(cpu_env, t0);
7854 gen_helper_mttc0_tcschedule(cpu_env, t0);
7857 gen_helper_mttc0_tcschefback(cpu_env, t0);
7860 gen_mtc0(ctx, t0, rd, sel);
7867 gen_helper_mttc0_entryhi(cpu_env, t0);
7870 gen_mtc0(ctx, t0, rd, sel);
7876 gen_helper_mttc0_status(cpu_env, t0);
7879 gen_mtc0(ctx, t0, rd, sel);
7885 gen_helper_mttc0_cause(cpu_env, t0);
7895 gen_helper_mttc0_ebase(cpu_env, t0);
7905 gen_helper_mttc0_debug(cpu_env, t0);
7908 gen_mtc0(ctx, t0, rd, sel);
7913 gen_mtc0(ctx, t0, rd, sel);
7915 } else switch (sel) {
7916 /* GPR registers. */
7918 gen_helper_0e1i(mttgpr, t0, rd);
7920 /* Auxiliary CPU registers */
7924 gen_helper_0e1i(mttlo, t0, 0);
7927 gen_helper_0e1i(mtthi, t0, 0);
7930 gen_helper_0e1i(mttacx, t0, 0);
7933 gen_helper_0e1i(mttlo, t0, 1);
7936 gen_helper_0e1i(mtthi, t0, 1);
7939 gen_helper_0e1i(mttacx, t0, 1);
7942 gen_helper_0e1i(mttlo, t0, 2);
7945 gen_helper_0e1i(mtthi, t0, 2);
7948 gen_helper_0e1i(mttacx, t0, 2);
7951 gen_helper_0e1i(mttlo, t0, 3);
7954 gen_helper_0e1i(mtthi, t0, 3);
7957 gen_helper_0e1i(mttacx, t0, 3);
7960 gen_helper_mttdsp(cpu_env, t0);
7966 /* Floating point (COP1). */
7968 /* XXX: For now we support only a single FPU context. */
7970 TCGv_i32 fp0 = tcg_temp_new_i32();
7972 tcg_gen_trunc_tl_i32(fp0, t0);
7973 gen_store_fpr32(ctx, fp0, rd);
7974 tcg_temp_free_i32(fp0);
7976 TCGv_i32 fp0 = tcg_temp_new_i32();
7978 tcg_gen_trunc_tl_i32(fp0, t0);
7979 gen_store_fpr32h(ctx, fp0, rd);
7980 tcg_temp_free_i32(fp0);
7984 /* XXX: For now we support only a single FPU context. */
7986 TCGv_i32 fs_tmp = tcg_const_i32(rd);
7988 gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
7989 tcg_temp_free_i32(fs_tmp);
7991 /* Stop translation as we may have changed hflags */
7992 ctx->bstate = BS_STOP;
7994 /* COP2: Not implemented. */
8001 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
8007 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
8008 generate_exception_end(ctx, EXCP_RI);
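/* Descriptive note (added): gen_cp0() below dispatches the privileged COP0
 * instruction group: MFC0/MTC0 (plus DMFC0/DMTC0 under TARGET_MIPS64,
 * MFHC0/MTHC0, and MFTR/MTTR for the MT ASE), the TLB maintenance ops
 * (TLBWI/TLBWR/TLBP/TLBR and, when the CPU model provides the helpers,
 * TLBINV/TLBINVF), and ERET/ERETNC, DERET and WAIT.  The register select
 * used throughout is taken from the low bits of the opcode,
 * i.e. (ctx->opcode & 0x7).
 */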
8011 static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
8013 const char *opn = "ldst";
8015 check_cp0_enabled(ctx);
8022 gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8027 TCGv t0 = tcg_temp_new();
8029 gen_load_gpr(t0, rt);
8030 gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
8035 #if defined(TARGET_MIPS64)
8037 check_insn(ctx, ISA_MIPS3);
8042 gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8046 check_insn(ctx, ISA_MIPS3);
8048 TCGv t0 = tcg_temp_new();
8050 gen_load_gpr(t0, rt);
8051 gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
8063 gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8069 TCGv t0 = tcg_temp_new();
8070 gen_load_gpr(t0, rt);
8071 gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
8077 check_insn(ctx, ASE_MT);
8082 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
8083 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
8087 check_insn(ctx, ASE_MT);
8088 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
8089 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
8094 if (!env->tlb->helper_tlbwi)
8096 gen_helper_tlbwi(cpu_env);
8101 if (!env->tlb->helper_tlbinv) {
8104 gen_helper_tlbinv(cpu_env);
8105 } /* treat as nop if TLBINV not supported */
8110 if (!env->tlb->helper_tlbinvf) {
8113 gen_helper_tlbinvf(cpu_env);
8114 } /* treat as nop if TLBINV not supported */
8118 if (!env->tlb->helper_tlbwr)
8120 gen_helper_tlbwr(cpu_env);
8124 if (!env->tlb->helper_tlbp)
8126 gen_helper_tlbp(cpu_env);
8130 if (!env->tlb->helper_tlbr)
8132 gen_helper_tlbr(cpu_env);
8134 case OPC_ERET: /* OPC_ERETNC */
8135 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8136 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8139 int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
8140 if (ctx->opcode & (1 << bit_shift)) {
8143 check_insn(ctx, ISA_MIPS32R5);
8144 gen_helper_eretnc(cpu_env);
8148 check_insn(ctx, ISA_MIPS2);
8149 gen_helper_eret(cpu_env);
8151 ctx->bstate = BS_EXCP;
8156 check_insn(ctx, ISA_MIPS32);
8157 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8158 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8161 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
8163 generate_exception_end(ctx, EXCP_RI);
8165 gen_helper_deret(cpu_env);
8166 ctx->bstate = BS_EXCP;
8171 check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
8172 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8173 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8176 /* If we get an exception, we want to restart at next instruction */
8178 save_cpu_state(ctx, 1);
8180 gen_helper_wait(cpu_env);
8181 ctx->bstate = BS_EXCP;
8186 generate_exception_end(ctx, EXCP_RI);
8189 (void)opn; /* avoid a compiler warning */
8191 #endif /* !CONFIG_USER_ONLY */
8193 /* CP1 Branches (before delay slot) */
8194 static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
8195 int32_t cc, int32_t offset)
8197 target_ulong btarget;
8198 TCGv_i32 t0 = tcg_temp_new_i32();
8200 if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
8201 generate_exception_end(ctx, EXCP_RI);
8206 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
8208 btarget = ctx->pc + 4 + offset;
8212 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8213 tcg_gen_not_i32(t0, t0);
8214 tcg_gen_andi_i32(t0, t0, 1);
8215 tcg_gen_extu_i32_tl(bcond, t0);
8218 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8219 tcg_gen_not_i32(t0, t0);
8220 tcg_gen_andi_i32(t0, t0, 1);
8221 tcg_gen_extu_i32_tl(bcond, t0);
8224 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8225 tcg_gen_andi_i32(t0, t0, 1);
8226 tcg_gen_extu_i32_tl(bcond, t0);
8229 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8230 tcg_gen_andi_i32(t0, t0, 1);
8231 tcg_gen_extu_i32_tl(bcond, t0);
8233 ctx->hflags |= MIPS_HFLAG_BL;
8237 TCGv_i32 t1 = tcg_temp_new_i32();
8238 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8239 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8240 tcg_gen_nand_i32(t0, t0, t1);
8241 tcg_temp_free_i32(t1);
8242 tcg_gen_andi_i32(t0, t0, 1);
8243 tcg_gen_extu_i32_tl(bcond, t0);
8248 TCGv_i32 t1 = tcg_temp_new_i32();
8249 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8250 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8251 tcg_gen_or_i32(t0, t0, t1);
8252 tcg_temp_free_i32(t1);
8253 tcg_gen_andi_i32(t0, t0, 1);
8254 tcg_gen_extu_i32_tl(bcond, t0);
8259 TCGv_i32 t1 = tcg_temp_new_i32();
8260 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8261 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8262 tcg_gen_and_i32(t0, t0, t1);
8263 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
8264 tcg_gen_and_i32(t0, t0, t1);
8265 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
8266 tcg_gen_nand_i32(t0, t0, t1);
8267 tcg_temp_free_i32(t1);
8268 tcg_gen_andi_i32(t0, t0, 1);
8269 tcg_gen_extu_i32_tl(bcond, t0);
8274 TCGv_i32 t1 = tcg_temp_new_i32();
8275 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8276 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8277 tcg_gen_or_i32(t0, t0, t1);
8278 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
8279 tcg_gen_or_i32(t0, t0, t1);
8280 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
8281 tcg_gen_or_i32(t0, t0, t1);
8282 tcg_temp_free_i32(t1);
8283 tcg_gen_andi_i32(t0, t0, 1);
8284 tcg_gen_extu_i32_tl(bcond, t0);
8287 ctx->hflags |= MIPS_HFLAG_BC;
8290 MIPS_INVAL("cp1 cond branch");
8291 generate_exception_end(ctx, EXCP_RI);
8294 ctx->btarget = btarget;
8295 ctx->hflags |= MIPS_HFLAG_BDS32;
8297 tcg_temp_free_i32(t0);
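/* Summary (added): each BC1 variant above reduces to testing one or more FP
 * condition-code bits of FCR31.  For a single condition code the generated
 * code is essentially
 *
 *     bcond = (fcr31 >> get_fp_bit(cc)) & 1;      // "true" variants
 *     bcond = (~(fcr31 >> get_fp_bit(cc))) & 1;   // "false" variants
 *
 * while the BC1ANY2/BC1ANY4 forms OR two or four consecutive condition bits
 * together (or NAND them for the "any false" variants) before masking with 1.
 */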
8300 /* R6 CP1 Branches */
8301 static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
8302 int32_t ft, int32_t offset,
8305 target_ulong btarget;
8306 TCGv_i64 t0 = tcg_temp_new_i64();
8308 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8309 #ifdef MIPS_DEBUG_DISAS
8310 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8313 generate_exception_end(ctx, EXCP_RI);
8317 gen_load_fpr64(ctx, t0, ft);
8318 tcg_gen_andi_i64(t0, t0, 1);
8320 btarget = addr_add(ctx, ctx->pc + 4, offset);
8324 tcg_gen_xori_i64(t0, t0, 1);
8325 ctx->hflags |= MIPS_HFLAG_BC;
8328 /* t0 already set */
8329 ctx->hflags |= MIPS_HFLAG_BC;
8332 MIPS_INVAL("cp1 cond branch");
8333 generate_exception_end(ctx, EXCP_RI);
8337 tcg_gen_trunc_i64_tl(bcond, t0);
8339 ctx->btarget = btarget;
8341 switch (delayslot_size) {
8343 ctx->hflags |= MIPS_HFLAG_BDS16;
8346 ctx->hflags |= MIPS_HFLAG_BDS32;
8351 tcg_temp_free_i64(t0);
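/* Descriptive note (added): the R6 variant above (BC1EQZ/BC1NEZ) no longer
 * uses the FCC bits in FCR31.  It loads FPR[ft] and branches on bit 0 of
 * that register, with an extra XOR for the "equal to zero" form, i.e. roughly
 *
 *     bcond = (fpr[ft] & 1) ^ 1;   // BC1EQZ
 *     bcond = (fpr[ft] & 1);       // BC1NEZ
 */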
8354 /* Coprocessor 1 (FPU) */
8356 #define FOP(func, fmt) (((fmt) << 21) | (func))
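/* Illustration (added): FOP() packs the COP1 "fmt" field (bits 25..21 of the
 * instruction word, relative to the CP1 major opcode) together with the 6-bit
 * function field (bits 5..0).  Assuming the standard MIPS fmt encodings
 * (FMT_S == 16, FMT_D == 17), for example:
 *
 *     OPC_ADD_S == FOP(0, FMT_S) == (16 << 21) | 0 == 0x02000000
 *
 * so comparing the masked opcode against these enumerators selects both the
 * operand format and the operation.
 */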
8359 OPC_ADD_S = FOP(0, FMT_S),
8360 OPC_SUB_S = FOP(1, FMT_S),
8361 OPC_MUL_S = FOP(2, FMT_S),
8362 OPC_DIV_S = FOP(3, FMT_S),
8363 OPC_SQRT_S = FOP(4, FMT_S),
8364 OPC_ABS_S = FOP(5, FMT_S),
8365 OPC_MOV_S = FOP(6, FMT_S),
8366 OPC_NEG_S = FOP(7, FMT_S),
8367 OPC_ROUND_L_S = FOP(8, FMT_S),
8368 OPC_TRUNC_L_S = FOP(9, FMT_S),
8369 OPC_CEIL_L_S = FOP(10, FMT_S),
8370 OPC_FLOOR_L_S = FOP(11, FMT_S),
8371 OPC_ROUND_W_S = FOP(12, FMT_S),
8372 OPC_TRUNC_W_S = FOP(13, FMT_S),
8373 OPC_CEIL_W_S = FOP(14, FMT_S),
8374 OPC_FLOOR_W_S = FOP(15, FMT_S),
8375 OPC_SEL_S = FOP(16, FMT_S),
8376 OPC_MOVCF_S = FOP(17, FMT_S),
8377 OPC_MOVZ_S = FOP(18, FMT_S),
8378 OPC_MOVN_S = FOP(19, FMT_S),
8379 OPC_SELEQZ_S = FOP(20, FMT_S),
8380 OPC_RECIP_S = FOP(21, FMT_S),
8381 OPC_RSQRT_S = FOP(22, FMT_S),
8382 OPC_SELNEZ_S = FOP(23, FMT_S),
8383 OPC_MADDF_S = FOP(24, FMT_S),
8384 OPC_MSUBF_S = FOP(25, FMT_S),
8385 OPC_RINT_S = FOP(26, FMT_S),
8386 OPC_CLASS_S = FOP(27, FMT_S),
8387 OPC_MIN_S = FOP(28, FMT_S),
8388 OPC_RECIP2_S = FOP(28, FMT_S),
8389 OPC_MINA_S = FOP(29, FMT_S),
8390 OPC_RECIP1_S = FOP(29, FMT_S),
8391 OPC_MAX_S = FOP(30, FMT_S),
8392 OPC_RSQRT1_S = FOP(30, FMT_S),
8393 OPC_MAXA_S = FOP(31, FMT_S),
8394 OPC_RSQRT2_S = FOP(31, FMT_S),
8395 OPC_CVT_D_S = FOP(33, FMT_S),
8396 OPC_CVT_W_S = FOP(36, FMT_S),
8397 OPC_CVT_L_S = FOP(37, FMT_S),
8398 OPC_CVT_PS_S = FOP(38, FMT_S),
8399 OPC_CMP_F_S = FOP (48, FMT_S),
8400 OPC_CMP_UN_S = FOP (49, FMT_S),
8401 OPC_CMP_EQ_S = FOP (50, FMT_S),
8402 OPC_CMP_UEQ_S = FOP (51, FMT_S),
8403 OPC_CMP_OLT_S = FOP (52, FMT_S),
8404 OPC_CMP_ULT_S = FOP (53, FMT_S),
8405 OPC_CMP_OLE_S = FOP (54, FMT_S),
8406 OPC_CMP_ULE_S = FOP (55, FMT_S),
8407 OPC_CMP_SF_S = FOP (56, FMT_S),
8408 OPC_CMP_NGLE_S = FOP (57, FMT_S),
8409 OPC_CMP_SEQ_S = FOP (58, FMT_S),
8410 OPC_CMP_NGL_S = FOP (59, FMT_S),
8411 OPC_CMP_LT_S = FOP (60, FMT_S),
8412 OPC_CMP_NGE_S = FOP (61, FMT_S),
8413 OPC_CMP_LE_S = FOP (62, FMT_S),
8414 OPC_CMP_NGT_S = FOP (63, FMT_S),
8416 OPC_ADD_D = FOP(0, FMT_D),
8417 OPC_SUB_D = FOP(1, FMT_D),
8418 OPC_MUL_D = FOP(2, FMT_D),
8419 OPC_DIV_D = FOP(3, FMT_D),
8420 OPC_SQRT_D = FOP(4, FMT_D),
8421 OPC_ABS_D = FOP(5, FMT_D),
8422 OPC_MOV_D = FOP(6, FMT_D),
8423 OPC_NEG_D = FOP(7, FMT_D),
8424 OPC_ROUND_L_D = FOP(8, FMT_D),
8425 OPC_TRUNC_L_D = FOP(9, FMT_D),
8426 OPC_CEIL_L_D = FOP(10, FMT_D),
8427 OPC_FLOOR_L_D = FOP(11, FMT_D),
8428 OPC_ROUND_W_D = FOP(12, FMT_D),
8429 OPC_TRUNC_W_D = FOP(13, FMT_D),
8430 OPC_CEIL_W_D = FOP(14, FMT_D),
8431 OPC_FLOOR_W_D = FOP(15, FMT_D),
8432 OPC_SEL_D = FOP(16, FMT_D),
8433 OPC_MOVCF_D = FOP(17, FMT_D),
8434 OPC_MOVZ_D = FOP(18, FMT_D),
8435 OPC_MOVN_D = FOP(19, FMT_D),
8436 OPC_SELEQZ_D = FOP(20, FMT_D),
8437 OPC_RECIP_D = FOP(21, FMT_D),
8438 OPC_RSQRT_D = FOP(22, FMT_D),
8439 OPC_SELNEZ_D = FOP(23, FMT_D),
8440 OPC_MADDF_D = FOP(24, FMT_D),
8441 OPC_MSUBF_D = FOP(25, FMT_D),
8442 OPC_RINT_D = FOP(26, FMT_D),
8443 OPC_CLASS_D = FOP(27, FMT_D),
8444 OPC_MIN_D = FOP(28, FMT_D),
8445 OPC_RECIP2_D = FOP(28, FMT_D),
8446 OPC_MINA_D = FOP(29, FMT_D),
8447 OPC_RECIP1_D = FOP(29, FMT_D),
8448 OPC_MAX_D = FOP(30, FMT_D),
8449 OPC_RSQRT1_D = FOP(30, FMT_D),
8450 OPC_MAXA_D = FOP(31, FMT_D),
8451 OPC_RSQRT2_D = FOP(31, FMT_D),
8452 OPC_CVT_S_D = FOP(32, FMT_D),
8453 OPC_CVT_W_D = FOP(36, FMT_D),
8454 OPC_CVT_L_D = FOP(37, FMT_D),
8455 OPC_CMP_F_D = FOP (48, FMT_D),
8456 OPC_CMP_UN_D = FOP (49, FMT_D),
8457 OPC_CMP_EQ_D = FOP (50, FMT_D),
8458 OPC_CMP_UEQ_D = FOP (51, FMT_D),
8459 OPC_CMP_OLT_D = FOP (52, FMT_D),
8460 OPC_CMP_ULT_D = FOP (53, FMT_D),
8461 OPC_CMP_OLE_D = FOP (54, FMT_D),
8462 OPC_CMP_ULE_D = FOP (55, FMT_D),
8463 OPC_CMP_SF_D = FOP (56, FMT_D),
8464 OPC_CMP_NGLE_D = FOP (57, FMT_D),
8465 OPC_CMP_SEQ_D = FOP (58, FMT_D),
8466 OPC_CMP_NGL_D = FOP (59, FMT_D),
8467 OPC_CMP_LT_D = FOP (60, FMT_D),
8468 OPC_CMP_NGE_D = FOP (61, FMT_D),
8469 OPC_CMP_LE_D = FOP (62, FMT_D),
8470 OPC_CMP_NGT_D = FOP (63, FMT_D),
8472 OPC_CVT_S_W = FOP(32, FMT_W),
8473 OPC_CVT_D_W = FOP(33, FMT_W),
8474 OPC_CVT_S_L = FOP(32, FMT_L),
8475 OPC_CVT_D_L = FOP(33, FMT_L),
8476 OPC_CVT_PS_PW = FOP(38, FMT_W),
8478 OPC_ADD_PS = FOP(0, FMT_PS),
8479 OPC_SUB_PS = FOP(1, FMT_PS),
8480 OPC_MUL_PS = FOP(2, FMT_PS),
8481 OPC_DIV_PS = FOP(3, FMT_PS),
8482 OPC_ABS_PS = FOP(5, FMT_PS),
8483 OPC_MOV_PS = FOP(6, FMT_PS),
8484 OPC_NEG_PS = FOP(7, FMT_PS),
8485 OPC_MOVCF_PS = FOP(17, FMT_PS),
8486 OPC_MOVZ_PS = FOP(18, FMT_PS),
8487 OPC_MOVN_PS = FOP(19, FMT_PS),
8488 OPC_ADDR_PS = FOP(24, FMT_PS),
8489 OPC_MULR_PS = FOP(26, FMT_PS),
8490 OPC_RECIP2_PS = FOP(28, FMT_PS),
8491 OPC_RECIP1_PS = FOP(29, FMT_PS),
8492 OPC_RSQRT1_PS = FOP(30, FMT_PS),
8493 OPC_RSQRT2_PS = FOP(31, FMT_PS),
8495 OPC_CVT_S_PU = FOP(32, FMT_PS),
8496 OPC_CVT_PW_PS = FOP(36, FMT_PS),
8497 OPC_CVT_S_PL = FOP(40, FMT_PS),
8498 OPC_PLL_PS = FOP(44, FMT_PS),
8499 OPC_PLU_PS = FOP(45, FMT_PS),
8500 OPC_PUL_PS = FOP(46, FMT_PS),
8501 OPC_PUU_PS = FOP(47, FMT_PS),
8502 OPC_CMP_F_PS = FOP (48, FMT_PS),
8503 OPC_CMP_UN_PS = FOP (49, FMT_PS),
8504 OPC_CMP_EQ_PS = FOP (50, FMT_PS),
8505 OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
8506 OPC_CMP_OLT_PS = FOP (52, FMT_PS),
8507 OPC_CMP_ULT_PS = FOP (53, FMT_PS),
8508 OPC_CMP_OLE_PS = FOP (54, FMT_PS),
8509 OPC_CMP_ULE_PS = FOP (55, FMT_PS),
8510 OPC_CMP_SF_PS = FOP (56, FMT_PS),
8511 OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
8512 OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
8513 OPC_CMP_NGL_PS = FOP (59, FMT_PS),
8514 OPC_CMP_LT_PS = FOP (60, FMT_PS),
8515 OPC_CMP_NGE_PS = FOP (61, FMT_PS),
8516 OPC_CMP_LE_PS = FOP (62, FMT_PS),
8517 OPC_CMP_NGT_PS = FOP (63, FMT_PS),
8521 R6_OPC_CMP_AF_S = FOP(0, FMT_W),
8522 R6_OPC_CMP_UN_S = FOP(1, FMT_W),
8523 R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
8524 R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
8525 R6_OPC_CMP_LT_S = FOP(4, FMT_W),
8526 R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
8527 R6_OPC_CMP_LE_S = FOP(6, FMT_W),
8528 R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
8529 R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
8530 R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
8531 R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
8532 R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
8533 R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
8534 R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
8535 R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
8536 R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
8537 R6_OPC_CMP_OR_S = FOP(17, FMT_W),
8538 R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
8539 R6_OPC_CMP_NE_S = FOP(19, FMT_W),
8540 R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
8541 R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
8542 R6_OPC_CMP_SNE_S = FOP(27, FMT_W),
8544 R6_OPC_CMP_AF_D = FOP(0, FMT_L),
8545 R6_OPC_CMP_UN_D = FOP(1, FMT_L),
8546 R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
8547 R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
8548 R6_OPC_CMP_LT_D = FOP(4, FMT_L),
8549 R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
8550 R6_OPC_CMP_LE_D = FOP(6, FMT_L),
8551 R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
8552 R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
8553 R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
8554 R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
8555 R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
8556 R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
8557 R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
8558 R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
8559 R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
8560 R6_OPC_CMP_OR_D = FOP(17, FMT_L),
8561 R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
8562 R6_OPC_CMP_NE_D = FOP(19, FMT_L),
8563 R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
8564 R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
8565 R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
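/* Descriptive note (added): the R6 CMP.cond.fmt encodings above reuse the W
 * and L format fields: the R6_OPC_CMP_*_S entries are defined with FMT_W and
 * the double-precision variants with FMT_L, which keeps them distinct from
 * the pre-R6 C.cond.fmt comparisons defined under FMT_S/FMT_D above.
 */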
8567 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
8569 TCGv t0 = tcg_temp_new();
8574 TCGv_i32 fp0 = tcg_temp_new_i32();
8576 gen_load_fpr32(ctx, fp0, fs);
8577 tcg_gen_ext_i32_tl(t0, fp0);
8578 tcg_temp_free_i32(fp0);
8580 gen_store_gpr(t0, rt);
8583 gen_load_gpr(t0, rt);
8585 TCGv_i32 fp0 = tcg_temp_new_i32();
8587 tcg_gen_trunc_tl_i32(fp0, t0);
8588 gen_store_fpr32(ctx, fp0, fs);
8589 tcg_temp_free_i32(fp0);
8593 gen_helper_1e0i(cfc1, t0, fs);
8594 gen_store_gpr(t0, rt);
8597 gen_load_gpr(t0, rt);
8598 save_cpu_state(ctx, 0);
8600 TCGv_i32 fs_tmp = tcg_const_i32(fs);
8602 gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
8603 tcg_temp_free_i32(fs_tmp);
8605 /* Stop translation as we may have changed hflags */
8606 ctx->bstate = BS_STOP;
8608 #if defined(TARGET_MIPS64)
8610 gen_load_fpr64(ctx, t0, fs);
8611 gen_store_gpr(t0, rt);
8614 gen_load_gpr(t0, rt);
8615 gen_store_fpr64(ctx, t0, fs);
8620 TCGv_i32 fp0 = tcg_temp_new_i32();
8622 gen_load_fpr32h(ctx, fp0, fs);
8623 tcg_gen_ext_i32_tl(t0, fp0);
8624 tcg_temp_free_i32(fp0);
8626 gen_store_gpr(t0, rt);
8629 gen_load_gpr(t0, rt);
8631 TCGv_i32 fp0 = tcg_temp_new_i32();
8633 tcg_gen_trunc_tl_i32(fp0, t0);
8634 gen_store_fpr32h(ctx, fp0, fs);
8635 tcg_temp_free_i32(fp0);
8639 MIPS_INVAL("cp1 move");
8640 generate_exception_end(ctx, EXCP_RI);
8648 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
8664 l1 = gen_new_label();
8665 t0 = tcg_temp_new_i32();
8666 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
8667 tcg_gen_brcondi_i32(cond, t0, 0, l1);
8668 tcg_temp_free_i32(t0);
8670 tcg_gen_movi_tl(cpu_gpr[rd], 0);
8672 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
8677 static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
8681 TCGv_i32 t0 = tcg_temp_new_i32();
8682 TCGLabel *l1 = gen_new_label();
8689 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
8690 tcg_gen_brcondi_i32(cond, t0, 0, l1);
8691 gen_load_fpr32(ctx, t0, fs);
8692 gen_store_fpr32(ctx, t0, fd);
8694 tcg_temp_free_i32(t0);
8697 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
8700 TCGv_i32 t0 = tcg_temp_new_i32();
8702 TCGLabel *l1 = gen_new_label();
8709 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
8710 tcg_gen_brcondi_i32(cond, t0, 0, l1);
8711 tcg_temp_free_i32(t0);
8712 fp0 = tcg_temp_new_i64();
8713 gen_load_fpr64(ctx, fp0, fs);
8714 gen_store_fpr64(ctx, fp0, fd);
8715 tcg_temp_free_i64(fp0);
8719 static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
8723 TCGv_i32 t0 = tcg_temp_new_i32();
8724 TCGLabel *l1 = gen_new_label();
8725 TCGLabel *l2 = gen_new_label();
8732 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
8733 tcg_gen_brcondi_i32(cond, t0, 0, l1);
8734 gen_load_fpr32(ctx, t0, fs);
8735 gen_store_fpr32(ctx, t0, fd);
8738 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
8739 tcg_gen_brcondi_i32(cond, t0, 0, l2);
8740 gen_load_fpr32h(ctx, t0, fs);
8741 gen_store_fpr32h(ctx, t0, fd);
8742 tcg_temp_free_i32(t0);
8746 static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
8749 TCGv_i32 t1 = tcg_const_i32(0);
8750 TCGv_i32 fp0 = tcg_temp_new_i32();
8751 TCGv_i32 fp1 = tcg_temp_new_i32();
8752 TCGv_i32 fp2 = tcg_temp_new_i32();
8753 gen_load_fpr32(ctx, fp0, fd);
8754 gen_load_fpr32(ctx, fp1, ft);
8755 gen_load_fpr32(ctx, fp2, fs);
8759 tcg_gen_andi_i32(fp0, fp0, 1);
8760 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
8763 tcg_gen_andi_i32(fp1, fp1, 1);
8764 tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
8767 tcg_gen_andi_i32(fp1, fp1, 1);
8768 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
8771 MIPS_INVAL("gen_sel_s");
8772 generate_exception_end(ctx, EXCP_RI);
8776 gen_store_fpr32(ctx, fp0, fd);
8777 tcg_temp_free_i32(fp2);
8778 tcg_temp_free_i32(fp1);
8779 tcg_temp_free_i32(fp0);
8780 tcg_temp_free_i32(t1);
8783 static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
8786 TCGv_i64 t1 = tcg_const_i64(0);
8787 TCGv_i64 fp0 = tcg_temp_new_i64();
8788 TCGv_i64 fp1 = tcg_temp_new_i64();
8789 TCGv_i64 fp2 = tcg_temp_new_i64();
8790 gen_load_fpr64(ctx, fp0, fd);
8791 gen_load_fpr64(ctx, fp1, ft);
8792 gen_load_fpr64(ctx, fp2, fs);
8796 tcg_gen_andi_i64(fp0, fp0, 1);
8797 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
8800 tcg_gen_andi_i64(fp1, fp1, 1);
8801 tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
8804 tcg_gen_andi_i64(fp1, fp1, 1);
8805 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
8808 MIPS_INVAL("gen_sel_d");
8809 generate_exception_end(ctx, EXCP_RI);
8813 gen_store_fpr64(ctx, fp0, fd);
8814 tcg_temp_free_i64(fp2);
8815 tcg_temp_free_i64(fp1);
8816 tcg_temp_free_i64(fp0);
8817 tcg_temp_free_i64(t1);
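/* Summary (added): the two helpers above implement the R6 FP selects with
 * movcond rather than branches.  Reading the operands as fd/ft/fs loaded into
 * fp0/fp1/fp2, the net effect of the generated TCG is:
 *
 *     SEL.fmt:    fd = (fd & 1) ? ft : fs;
 *     SELEQZ.fmt: fd = (ft & 1) ? 0  : fs;
 *     SELNEZ.fmt: fd = (ft & 1) ? fs : 0;
 *
 * (Condensed summary of the code above, not a separate implementation.)
 */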
8820 static void gen_farith (DisasContext *ctx, enum fopcode op1,
8821 int ft, int fs, int fd, int cc)
8823 uint32_t func = ctx->opcode & 0x3f;
8827 TCGv_i32 fp0
= tcg_temp_new_i32();
8828 TCGv_i32 fp1
= tcg_temp_new_i32();
8830 gen_load_fpr32(ctx
, fp0
, fs
);
8831 gen_load_fpr32(ctx
, fp1
, ft
);
8832 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8833 tcg_temp_free_i32(fp1
);
8834 gen_store_fpr32(ctx
, fp0
, fd
);
8835 tcg_temp_free_i32(fp0
);
8840 TCGv_i32 fp0
= tcg_temp_new_i32();
8841 TCGv_i32 fp1
= tcg_temp_new_i32();
8843 gen_load_fpr32(ctx
, fp0
, fs
);
8844 gen_load_fpr32(ctx
, fp1
, ft
);
8845 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8846 tcg_temp_free_i32(fp1
);
8847 gen_store_fpr32(ctx
, fp0
, fd
);
8848 tcg_temp_free_i32(fp0
);
8853 TCGv_i32 fp0
= tcg_temp_new_i32();
8854 TCGv_i32 fp1
= tcg_temp_new_i32();
8856 gen_load_fpr32(ctx
, fp0
, fs
);
8857 gen_load_fpr32(ctx
, fp1
, ft
);
8858 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8859 tcg_temp_free_i32(fp1
);
8860 gen_store_fpr32(ctx
, fp0
, fd
);
8861 tcg_temp_free_i32(fp0
);
8866 TCGv_i32 fp0
= tcg_temp_new_i32();
8867 TCGv_i32 fp1
= tcg_temp_new_i32();
8869 gen_load_fpr32(ctx
, fp0
, fs
);
8870 gen_load_fpr32(ctx
, fp1
, ft
);
8871 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8872 tcg_temp_free_i32(fp1
);
8873 gen_store_fpr32(ctx
, fp0
, fd
);
8874 tcg_temp_free_i32(fp0
);
8879 TCGv_i32 fp0
= tcg_temp_new_i32();
8881 gen_load_fpr32(ctx
, fp0
, fs
);
8882 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8883 gen_store_fpr32(ctx
, fp0
, fd
);
8884 tcg_temp_free_i32(fp0
);
8889 TCGv_i32 fp0
= tcg_temp_new_i32();
8891 gen_load_fpr32(ctx
, fp0
, fs
);
8892 gen_helper_float_abs_s(fp0
, fp0
);
8893 gen_store_fpr32(ctx
, fp0
, fd
);
8894 tcg_temp_free_i32(fp0
);
8899 TCGv_i32 fp0
= tcg_temp_new_i32();
8901 gen_load_fpr32(ctx
, fp0
, fs
);
8902 gen_store_fpr32(ctx
, fp0
, fd
);
8903 tcg_temp_free_i32(fp0
);
8908 TCGv_i32 fp0
= tcg_temp_new_i32();
8910 gen_load_fpr32(ctx
, fp0
, fs
);
8911 gen_helper_float_chs_s(fp0
, fp0
);
8912 gen_store_fpr32(ctx
, fp0
, fd
);
8913 tcg_temp_free_i32(fp0
);
8917 check_cp1_64bitmode(ctx
);
8919 TCGv_i32 fp32
= tcg_temp_new_i32();
8920 TCGv_i64 fp64
= tcg_temp_new_i64();
8922 gen_load_fpr32(ctx
, fp32
, fs
);
8923 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8924 tcg_temp_free_i32(fp32
);
8925 gen_store_fpr64(ctx
, fp64
, fd
);
8926 tcg_temp_free_i64(fp64
);
8930 check_cp1_64bitmode(ctx
);
8932 TCGv_i32 fp32
= tcg_temp_new_i32();
8933 TCGv_i64 fp64
= tcg_temp_new_i64();
8935 gen_load_fpr32(ctx
, fp32
, fs
);
8936 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8937 tcg_temp_free_i32(fp32
);
8938 gen_store_fpr64(ctx
, fp64
, fd
);
8939 tcg_temp_free_i64(fp64
);
8943 check_cp1_64bitmode(ctx
);
8945 TCGv_i32 fp32
= tcg_temp_new_i32();
8946 TCGv_i64 fp64
= tcg_temp_new_i64();
8948 gen_load_fpr32(ctx
, fp32
, fs
);
8949 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8950 tcg_temp_free_i32(fp32
);
8951 gen_store_fpr64(ctx
, fp64
, fd
);
8952 tcg_temp_free_i64(fp64
);
8956 check_cp1_64bitmode(ctx
);
8958 TCGv_i32 fp32
= tcg_temp_new_i32();
8959 TCGv_i64 fp64
= tcg_temp_new_i64();
8961 gen_load_fpr32(ctx
, fp32
, fs
);
8962 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8963 tcg_temp_free_i32(fp32
);
8964 gen_store_fpr64(ctx
, fp64
, fd
);
8965 tcg_temp_free_i64(fp64
);
8970 TCGv_i32 fp0
= tcg_temp_new_i32();
8972 gen_load_fpr32(ctx
, fp0
, fs
);
8973 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8974 gen_store_fpr32(ctx
, fp0
, fd
);
8975 tcg_temp_free_i32(fp0
);
8980 TCGv_i32 fp0
= tcg_temp_new_i32();
8982 gen_load_fpr32(ctx
, fp0
, fs
);
8983 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8984 gen_store_fpr32(ctx
, fp0
, fd
);
8985 tcg_temp_free_i32(fp0
);
8990 TCGv_i32 fp0
= tcg_temp_new_i32();
8992 gen_load_fpr32(ctx
, fp0
, fs
);
8993 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8994 gen_store_fpr32(ctx
, fp0
, fd
);
8995 tcg_temp_free_i32(fp0
);
9000 TCGv_i32 fp0
= tcg_temp_new_i32();
9002 gen_load_fpr32(ctx
, fp0
, fs
);
9003 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
9004 gen_store_fpr32(ctx
, fp0
, fd
);
9005 tcg_temp_free_i32(fp0
);
9009 check_insn(ctx
, ISA_MIPS32R6
);
9010 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9013 check_insn(ctx
, ISA_MIPS32R6
);
9014 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9017 check_insn(ctx
, ISA_MIPS32R6
);
9018 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9021 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9022 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9025 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9027 TCGLabel
*l1
= gen_new_label();
9031 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9033 fp0
= tcg_temp_new_i32();
9034 gen_load_fpr32(ctx
, fp0
, fs
);
9035 gen_store_fpr32(ctx
, fp0
, fd
);
9036 tcg_temp_free_i32(fp0
);
9041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9043 TCGLabel
*l1
= gen_new_label();
9047 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9048 fp0
= tcg_temp_new_i32();
9049 gen_load_fpr32(ctx
, fp0
, fs
);
9050 gen_store_fpr32(ctx
, fp0
, fd
);
9051 tcg_temp_free_i32(fp0
);
9058 TCGv_i32 fp0
= tcg_temp_new_i32();
9060 gen_load_fpr32(ctx
, fp0
, fs
);
9061 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9062 gen_store_fpr32(ctx
, fp0
, fd
);
9063 tcg_temp_free_i32(fp0
);
9068 TCGv_i32 fp0
= tcg_temp_new_i32();
9070 gen_load_fpr32(ctx
, fp0
, fs
);
9071 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9072 gen_store_fpr32(ctx
, fp0
, fd
);
9073 tcg_temp_free_i32(fp0
);
9077 check_insn(ctx
, ISA_MIPS32R6
);
9079 TCGv_i32 fp0
= tcg_temp_new_i32();
9080 TCGv_i32 fp1
= tcg_temp_new_i32();
9081 TCGv_i32 fp2
= tcg_temp_new_i32();
9082 gen_load_fpr32(ctx
, fp0
, fs
);
9083 gen_load_fpr32(ctx
, fp1
, ft
);
9084 gen_load_fpr32(ctx
, fp2
, fd
);
9085 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9086 gen_store_fpr32(ctx
, fp2
, fd
);
9087 tcg_temp_free_i32(fp2
);
9088 tcg_temp_free_i32(fp1
);
9089 tcg_temp_free_i32(fp0
);
9093 check_insn(ctx
, ISA_MIPS32R6
);
9095 TCGv_i32 fp0
= tcg_temp_new_i32();
9096 TCGv_i32 fp1
= tcg_temp_new_i32();
9097 TCGv_i32 fp2
= tcg_temp_new_i32();
9098 gen_load_fpr32(ctx
, fp0
, fs
);
9099 gen_load_fpr32(ctx
, fp1
, ft
);
9100 gen_load_fpr32(ctx
, fp2
, fd
);
9101 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9102 gen_store_fpr32(ctx
, fp2
, fd
);
9103 tcg_temp_free_i32(fp2
);
9104 tcg_temp_free_i32(fp1
);
9105 tcg_temp_free_i32(fp0
);
9109 check_insn(ctx
, ISA_MIPS32R6
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9112 gen_load_fpr32(ctx
, fp0
, fs
);
9113 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9114 gen_store_fpr32(ctx
, fp0
, fd
);
9115 tcg_temp_free_i32(fp0
);
9119 check_insn(ctx
, ISA_MIPS32R6
);
9121 TCGv_i32 fp0
= tcg_temp_new_i32();
9122 gen_load_fpr32(ctx
, fp0
, fs
);
9123 gen_helper_float_class_s(fp0
, fp0
);
9124 gen_store_fpr32(ctx
, fp0
, fd
);
9125 tcg_temp_free_i32(fp0
);
9128 case OPC_MIN_S
: /* OPC_RECIP2_S */
9129 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9131 TCGv_i32 fp0
= tcg_temp_new_i32();
9132 TCGv_i32 fp1
= tcg_temp_new_i32();
9133 TCGv_i32 fp2
= tcg_temp_new_i32();
9134 gen_load_fpr32(ctx
, fp0
, fs
);
9135 gen_load_fpr32(ctx
, fp1
, ft
);
9136 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9137 gen_store_fpr32(ctx
, fp2
, fd
);
9138 tcg_temp_free_i32(fp2
);
9139 tcg_temp_free_i32(fp1
);
9140 tcg_temp_free_i32(fp0
);
9143 check_cp1_64bitmode(ctx
);
9145 TCGv_i32 fp0
= tcg_temp_new_i32();
9146 TCGv_i32 fp1
= tcg_temp_new_i32();
9148 gen_load_fpr32(ctx
, fp0
, fs
);
9149 gen_load_fpr32(ctx
, fp1
, ft
);
9150 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9151 tcg_temp_free_i32(fp1
);
9152 gen_store_fpr32(ctx
, fp0
, fd
);
9153 tcg_temp_free_i32(fp0
);
9157 case OPC_MINA_S
: /* OPC_RECIP1_S */
9158 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9160 TCGv_i32 fp0
= tcg_temp_new_i32();
9161 TCGv_i32 fp1
= tcg_temp_new_i32();
9162 TCGv_i32 fp2
= tcg_temp_new_i32();
9163 gen_load_fpr32(ctx
, fp0
, fs
);
9164 gen_load_fpr32(ctx
, fp1
, ft
);
9165 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9166 gen_store_fpr32(ctx
, fp2
, fd
);
9167 tcg_temp_free_i32(fp2
);
9168 tcg_temp_free_i32(fp1
);
9169 tcg_temp_free_i32(fp0
);
9172 check_cp1_64bitmode(ctx
);
9174 TCGv_i32 fp0
= tcg_temp_new_i32();
9176 gen_load_fpr32(ctx
, fp0
, fs
);
9177 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9178 gen_store_fpr32(ctx
, fp0
, fd
);
9179 tcg_temp_free_i32(fp0
);
9183 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9184 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9186 TCGv_i32 fp0
= tcg_temp_new_i32();
9187 TCGv_i32 fp1
= tcg_temp_new_i32();
9188 gen_load_fpr32(ctx
, fp0
, fs
);
9189 gen_load_fpr32(ctx
, fp1
, ft
);
9190 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9191 gen_store_fpr32(ctx
, fp1
, fd
);
9192 tcg_temp_free_i32(fp1
);
9193 tcg_temp_free_i32(fp0
);
9196 check_cp1_64bitmode(ctx
);
9198 TCGv_i32 fp0
= tcg_temp_new_i32();
9200 gen_load_fpr32(ctx
, fp0
, fs
);
9201 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9202 gen_store_fpr32(ctx
, fp0
, fd
);
9203 tcg_temp_free_i32(fp0
);
9207 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9208 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9210 TCGv_i32 fp0
= tcg_temp_new_i32();
9211 TCGv_i32 fp1
= tcg_temp_new_i32();
9212 gen_load_fpr32(ctx
, fp0
, fs
);
9213 gen_load_fpr32(ctx
, fp1
, ft
);
9214 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9215 gen_store_fpr32(ctx
, fp1
, fd
);
9216 tcg_temp_free_i32(fp1
);
9217 tcg_temp_free_i32(fp0
);
9220 check_cp1_64bitmode(ctx
);
9222 TCGv_i32 fp0
= tcg_temp_new_i32();
9223 TCGv_i32 fp1
= tcg_temp_new_i32();
9225 gen_load_fpr32(ctx
, fp0
, fs
);
9226 gen_load_fpr32(ctx
, fp1
, ft
);
9227 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9228 tcg_temp_free_i32(fp1
);
9229 gen_store_fpr32(ctx
, fp0
, fd
);
9230 tcg_temp_free_i32(fp0
);
9235 check_cp1_registers(ctx
, fd
);
9237 TCGv_i32 fp32
= tcg_temp_new_i32();
9238 TCGv_i64 fp64
= tcg_temp_new_i64();
9240 gen_load_fpr32(ctx
, fp32
, fs
);
9241 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9242 tcg_temp_free_i32(fp32
);
9243 gen_store_fpr64(ctx
, fp64
, fd
);
9244 tcg_temp_free_i64(fp64
);
9249 TCGv_i32 fp0
= tcg_temp_new_i32();
9251 gen_load_fpr32(ctx
, fp0
, fs
);
9252 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9253 gen_store_fpr32(ctx
, fp0
, fd
);
9254 tcg_temp_free_i32(fp0
);
9258 check_cp1_64bitmode(ctx
);
9260 TCGv_i32 fp32
= tcg_temp_new_i32();
9261 TCGv_i64 fp64
= tcg_temp_new_i64();
9263 gen_load_fpr32(ctx
, fp32
, fs
);
9264 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9265 tcg_temp_free_i32(fp32
);
9266 gen_store_fpr64(ctx
, fp64
, fd
);
9267 tcg_temp_free_i64(fp64
);
9273 TCGv_i64 fp64
= tcg_temp_new_i64();
9274 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9275 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9277 gen_load_fpr32(ctx
, fp32_0
, fs
);
9278 gen_load_fpr32(ctx
, fp32_1
, ft
);
9279 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9280 tcg_temp_free_i32(fp32_1
);
9281 tcg_temp_free_i32(fp32_0
);
9282 gen_store_fpr64(ctx
, fp64
, fd
);
9283 tcg_temp_free_i64(fp64
);
9295 case OPC_CMP_NGLE_S
:
9302 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9303 if (ctx
->opcode
& (1 << 6)) {
9304 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9306 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9310 check_cp1_registers(ctx
, fs
| ft
| fd
);
9312 TCGv_i64 fp0
= tcg_temp_new_i64();
9313 TCGv_i64 fp1
= tcg_temp_new_i64();
9315 gen_load_fpr64(ctx
, fp0
, fs
);
9316 gen_load_fpr64(ctx
, fp1
, ft
);
9317 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9318 tcg_temp_free_i64(fp1
);
9319 gen_store_fpr64(ctx
, fp0
, fd
);
9320 tcg_temp_free_i64(fp0
);
9324 check_cp1_registers(ctx
, fs
| ft
| fd
);
9326 TCGv_i64 fp0
= tcg_temp_new_i64();
9327 TCGv_i64 fp1
= tcg_temp_new_i64();
9329 gen_load_fpr64(ctx
, fp0
, fs
);
9330 gen_load_fpr64(ctx
, fp1
, ft
);
9331 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9332 tcg_temp_free_i64(fp1
);
9333 gen_store_fpr64(ctx
, fp0
, fd
);
9334 tcg_temp_free_i64(fp0
);
9338 check_cp1_registers(ctx
, fs
| ft
| fd
);
9340 TCGv_i64 fp0
= tcg_temp_new_i64();
9341 TCGv_i64 fp1
= tcg_temp_new_i64();
9343 gen_load_fpr64(ctx
, fp0
, fs
);
9344 gen_load_fpr64(ctx
, fp1
, ft
);
9345 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9346 tcg_temp_free_i64(fp1
);
9347 gen_store_fpr64(ctx
, fp0
, fd
);
9348 tcg_temp_free_i64(fp0
);
9352 check_cp1_registers(ctx
, fs
| ft
| fd
);
9354 TCGv_i64 fp0
= tcg_temp_new_i64();
9355 TCGv_i64 fp1
= tcg_temp_new_i64();
9357 gen_load_fpr64(ctx
, fp0
, fs
);
9358 gen_load_fpr64(ctx
, fp1
, ft
);
9359 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9360 tcg_temp_free_i64(fp1
);
9361 gen_store_fpr64(ctx
, fp0
, fd
);
9362 tcg_temp_free_i64(fp0
);
9366 check_cp1_registers(ctx
, fs
| fd
);
9368 TCGv_i64 fp0
= tcg_temp_new_i64();
9370 gen_load_fpr64(ctx
, fp0
, fs
);
9371 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9372 gen_store_fpr64(ctx
, fp0
, fd
);
9373 tcg_temp_free_i64(fp0
);
9377 check_cp1_registers(ctx
, fs
| fd
);
9379 TCGv_i64 fp0
= tcg_temp_new_i64();
9381 gen_load_fpr64(ctx
, fp0
, fs
);
9382 gen_helper_float_abs_d(fp0
, fp0
);
9383 gen_store_fpr64(ctx
, fp0
, fd
);
9384 tcg_temp_free_i64(fp0
);
9388 check_cp1_registers(ctx
, fs
| fd
);
9390 TCGv_i64 fp0
= tcg_temp_new_i64();
9392 gen_load_fpr64(ctx
, fp0
, fs
);
9393 gen_store_fpr64(ctx
, fp0
, fd
);
9394 tcg_temp_free_i64(fp0
);
9398 check_cp1_registers(ctx
, fs
| fd
);
9400 TCGv_i64 fp0
= tcg_temp_new_i64();
9402 gen_load_fpr64(ctx
, fp0
, fs
);
9403 gen_helper_float_chs_d(fp0
, fp0
);
9404 gen_store_fpr64(ctx
, fp0
, fd
);
9405 tcg_temp_free_i64(fp0
);
9409 check_cp1_64bitmode(ctx
);
9411 TCGv_i64 fp0
= tcg_temp_new_i64();
9413 gen_load_fpr64(ctx
, fp0
, fs
);
9414 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9415 gen_store_fpr64(ctx
, fp0
, fd
);
9416 tcg_temp_free_i64(fp0
);
9420 check_cp1_64bitmode(ctx
);
9422 TCGv_i64 fp0
= tcg_temp_new_i64();
9424 gen_load_fpr64(ctx
, fp0
, fs
);
9425 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9426 gen_store_fpr64(ctx
, fp0
, fd
);
9427 tcg_temp_free_i64(fp0
);
9431 check_cp1_64bitmode(ctx
);
9433 TCGv_i64 fp0
= tcg_temp_new_i64();
9435 gen_load_fpr64(ctx
, fp0
, fs
);
9436 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9437 gen_store_fpr64(ctx
, fp0
, fd
);
9438 tcg_temp_free_i64(fp0
);
9442 check_cp1_64bitmode(ctx
);
9444 TCGv_i64 fp0
= tcg_temp_new_i64();
9446 gen_load_fpr64(ctx
, fp0
, fs
);
9447 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9448 gen_store_fpr64(ctx
, fp0
, fd
);
9449 tcg_temp_free_i64(fp0
);
9453 check_cp1_registers(ctx
, fs
);
9455 TCGv_i32 fp32
= tcg_temp_new_i32();
9456 TCGv_i64 fp64
= tcg_temp_new_i64();
9458 gen_load_fpr64(ctx
, fp64
, fs
);
9459 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9460 tcg_temp_free_i64(fp64
);
9461 gen_store_fpr32(ctx
, fp32
, fd
);
9462 tcg_temp_free_i32(fp32
);
9466 check_cp1_registers(ctx
, fs
);
9468 TCGv_i32 fp32
= tcg_temp_new_i32();
9469 TCGv_i64 fp64
= tcg_temp_new_i64();
9471 gen_load_fpr64(ctx
, fp64
, fs
);
9472 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9473 tcg_temp_free_i64(fp64
);
9474 gen_store_fpr32(ctx
, fp32
, fd
);
9475 tcg_temp_free_i32(fp32
);
9479 check_cp1_registers(ctx
, fs
);
9481 TCGv_i32 fp32
= tcg_temp_new_i32();
9482 TCGv_i64 fp64
= tcg_temp_new_i64();
9484 gen_load_fpr64(ctx
, fp64
, fs
);
9485 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9486 tcg_temp_free_i64(fp64
);
9487 gen_store_fpr32(ctx
, fp32
, fd
);
9488 tcg_temp_free_i32(fp32
);
9492 check_cp1_registers(ctx
, fs
);
9494 TCGv_i32 fp32
= tcg_temp_new_i32();
9495 TCGv_i64 fp64
= tcg_temp_new_i64();
9497 gen_load_fpr64(ctx
, fp64
, fs
);
9498 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9499 tcg_temp_free_i64(fp64
);
9500 gen_store_fpr32(ctx
, fp32
, fd
);
9501 tcg_temp_free_i32(fp32
);
9505 check_insn(ctx
, ISA_MIPS32R6
);
9506 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9509 check_insn(ctx
, ISA_MIPS32R6
);
9510 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9513 check_insn(ctx
, ISA_MIPS32R6
);
9514 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9517 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9518 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9521 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9523 TCGLabel
*l1
= gen_new_label();
9527 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9529 fp0
= tcg_temp_new_i64();
9530 gen_load_fpr64(ctx
, fp0
, fs
);
9531 gen_store_fpr64(ctx
, fp0
, fd
);
9532 tcg_temp_free_i64(fp0
);
9537 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9539 TCGLabel
*l1
= gen_new_label();
9543 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9544 fp0
= tcg_temp_new_i64();
9545 gen_load_fpr64(ctx
, fp0
, fs
);
9546 gen_store_fpr64(ctx
, fp0
, fd
);
9547 tcg_temp_free_i64(fp0
);
9553 check_cp1_registers(ctx
, fs
| fd
);
9555 TCGv_i64 fp0
= tcg_temp_new_i64();
9557 gen_load_fpr64(ctx
, fp0
, fs
);
9558 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9559 gen_store_fpr64(ctx
, fp0
, fd
);
9560 tcg_temp_free_i64(fp0
);
9564 check_cp1_registers(ctx
, fs
| fd
);
9566 TCGv_i64 fp0
= tcg_temp_new_i64();
9568 gen_load_fpr64(ctx
, fp0
, fs
);
9569 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9570 gen_store_fpr64(ctx
, fp0
, fd
);
9571 tcg_temp_free_i64(fp0
);
9575 check_insn(ctx
, ISA_MIPS32R6
);
9577 TCGv_i64 fp0
= tcg_temp_new_i64();
9578 TCGv_i64 fp1
= tcg_temp_new_i64();
9579 TCGv_i64 fp2
= tcg_temp_new_i64();
9580 gen_load_fpr64(ctx
, fp0
, fs
);
9581 gen_load_fpr64(ctx
, fp1
, ft
);
9582 gen_load_fpr64(ctx
, fp2
, fd
);
9583 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9584 gen_store_fpr64(ctx
, fp2
, fd
);
9585 tcg_temp_free_i64(fp2
);
9586 tcg_temp_free_i64(fp1
);
9587 tcg_temp_free_i64(fp0
);
9591 check_insn(ctx
, ISA_MIPS32R6
);
9593 TCGv_i64 fp0
= tcg_temp_new_i64();
9594 TCGv_i64 fp1
= tcg_temp_new_i64();
9595 TCGv_i64 fp2
= tcg_temp_new_i64();
9596 gen_load_fpr64(ctx
, fp0
, fs
);
9597 gen_load_fpr64(ctx
, fp1
, ft
);
9598 gen_load_fpr64(ctx
, fp2
, fd
);
9599 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9600 gen_store_fpr64(ctx
, fp2
, fd
);
9601 tcg_temp_free_i64(fp2
);
9602 tcg_temp_free_i64(fp1
);
9603 tcg_temp_free_i64(fp0
);
9607 check_insn(ctx
, ISA_MIPS32R6
);
9609 TCGv_i64 fp0
= tcg_temp_new_i64();
9610 gen_load_fpr64(ctx
, fp0
, fs
);
9611 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9612 gen_store_fpr64(ctx
, fp0
, fd
);
9613 tcg_temp_free_i64(fp0
);
9617 check_insn(ctx
, ISA_MIPS32R6
);
9619 TCGv_i64 fp0
= tcg_temp_new_i64();
9620 gen_load_fpr64(ctx
, fp0
, fs
);
9621 gen_helper_float_class_d(fp0
, fp0
);
9622 gen_store_fpr64(ctx
, fp0
, fd
);
9623 tcg_temp_free_i64(fp0
);
9626 case OPC_MIN_D
: /* OPC_RECIP2_D */
9627 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9629 TCGv_i64 fp0
= tcg_temp_new_i64();
9630 TCGv_i64 fp1
= tcg_temp_new_i64();
9631 gen_load_fpr64(ctx
, fp0
, fs
);
9632 gen_load_fpr64(ctx
, fp1
, ft
);
9633 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9634 gen_store_fpr64(ctx
, fp1
, fd
);
9635 tcg_temp_free_i64(fp1
);
9636 tcg_temp_free_i64(fp0
);
9639 check_cp1_64bitmode(ctx
);
9641 TCGv_i64 fp0
= tcg_temp_new_i64();
9642 TCGv_i64 fp1
= tcg_temp_new_i64();
9644 gen_load_fpr64(ctx
, fp0
, fs
);
9645 gen_load_fpr64(ctx
, fp1
, ft
);
9646 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9647 tcg_temp_free_i64(fp1
);
9648 gen_store_fpr64(ctx
, fp0
, fd
);
9649 tcg_temp_free_i64(fp0
);
9653 case OPC_MINA_D
: /* OPC_RECIP1_D */
9654 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9656 TCGv_i64 fp0
= tcg_temp_new_i64();
9657 TCGv_i64 fp1
= tcg_temp_new_i64();
9658 gen_load_fpr64(ctx
, fp0
, fs
);
9659 gen_load_fpr64(ctx
, fp1
, ft
);
9660 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9661 gen_store_fpr64(ctx
, fp1
, fd
);
9662 tcg_temp_free_i64(fp1
);
9663 tcg_temp_free_i64(fp0
);
9666 check_cp1_64bitmode(ctx
);
9668 TCGv_i64 fp0
= tcg_temp_new_i64();
9670 gen_load_fpr64(ctx
, fp0
, fs
);
9671 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9672 gen_store_fpr64(ctx
, fp0
, fd
);
9673 tcg_temp_free_i64(fp0
);
9677 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9678 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9680 TCGv_i64 fp0
= tcg_temp_new_i64();
9681 TCGv_i64 fp1
= tcg_temp_new_i64();
9682 gen_load_fpr64(ctx
, fp0
, fs
);
9683 gen_load_fpr64(ctx
, fp1
, ft
);
9684 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9685 gen_store_fpr64(ctx
, fp1
, fd
);
9686 tcg_temp_free_i64(fp1
);
9687 tcg_temp_free_i64(fp0
);
9690 check_cp1_64bitmode(ctx
);
9692 TCGv_i64 fp0
= tcg_temp_new_i64();
9694 gen_load_fpr64(ctx
, fp0
, fs
);
9695 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9696 gen_store_fpr64(ctx
, fp0
, fd
);
9697 tcg_temp_free_i64(fp0
);
9701 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9702 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9704 TCGv_i64 fp0
= tcg_temp_new_i64();
9705 TCGv_i64 fp1
= tcg_temp_new_i64();
9706 gen_load_fpr64(ctx
, fp0
, fs
);
9707 gen_load_fpr64(ctx
, fp1
, ft
);
9708 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9709 gen_store_fpr64(ctx
, fp1
, fd
);
9710 tcg_temp_free_i64(fp1
);
9711 tcg_temp_free_i64(fp0
);
9714 check_cp1_64bitmode(ctx
);
9716 TCGv_i64 fp0
= tcg_temp_new_i64();
9717 TCGv_i64 fp1
= tcg_temp_new_i64();
9719 gen_load_fpr64(ctx
, fp0
, fs
);
9720 gen_load_fpr64(ctx
, fp1
, ft
);
9721 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9722 tcg_temp_free_i64(fp1
);
9723 gen_store_fpr64(ctx
, fp0
, fd
);
9724 tcg_temp_free_i64(fp0
);
9737 case OPC_CMP_NGLE_D
:
9744 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9745 if (ctx
->opcode
& (1 << 6)) {
9746 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9748 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9752 check_cp1_registers(ctx
, fs
);
9754 TCGv_i32 fp32
= tcg_temp_new_i32();
9755 TCGv_i64 fp64
= tcg_temp_new_i64();
9757 gen_load_fpr64(ctx
, fp64
, fs
);
9758 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9759 tcg_temp_free_i64(fp64
);
9760 gen_store_fpr32(ctx
, fp32
, fd
);
9761 tcg_temp_free_i32(fp32
);
9765 check_cp1_registers(ctx
, fs
);
9767 TCGv_i32 fp32
= tcg_temp_new_i32();
9768 TCGv_i64 fp64
= tcg_temp_new_i64();
9770 gen_load_fpr64(ctx
, fp64
, fs
);
9771 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9772 tcg_temp_free_i64(fp64
);
9773 gen_store_fpr32(ctx
, fp32
, fd
);
9774 tcg_temp_free_i32(fp32
);
9778 check_cp1_64bitmode(ctx
);
9780 TCGv_i64 fp0
= tcg_temp_new_i64();
9782 gen_load_fpr64(ctx
, fp0
, fs
);
9783 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9784 gen_store_fpr64(ctx
, fp0
, fd
);
9785 tcg_temp_free_i64(fp0
);
9790 TCGv_i32 fp0
= tcg_temp_new_i32();
9792 gen_load_fpr32(ctx
, fp0
, fs
);
9793 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9794 gen_store_fpr32(ctx
, fp0
, fd
);
9795 tcg_temp_free_i32(fp0
);
9799 check_cp1_registers(ctx
, fd
);
9801 TCGv_i32 fp32
= tcg_temp_new_i32();
9802 TCGv_i64 fp64
= tcg_temp_new_i64();
9804 gen_load_fpr32(ctx
, fp32
, fs
);
9805 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9806 tcg_temp_free_i32(fp32
);
9807 gen_store_fpr64(ctx
, fp64
, fd
);
9808 tcg_temp_free_i64(fp64
);
9812 check_cp1_64bitmode(ctx
);
9814 TCGv_i32 fp32
= tcg_temp_new_i32();
9815 TCGv_i64 fp64
= tcg_temp_new_i64();
9817 gen_load_fpr64(ctx
, fp64
, fs
);
9818 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9819 tcg_temp_free_i64(fp64
);
9820 gen_store_fpr32(ctx
, fp32
, fd
);
9821 tcg_temp_free_i32(fp32
);
9825 check_cp1_64bitmode(ctx
);
9827 TCGv_i64 fp0
= tcg_temp_new_i64();
9829 gen_load_fpr64(ctx
, fp0
, fs
);
9830 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9831 gen_store_fpr64(ctx
, fp0
, fd
);
9832 tcg_temp_free_i64(fp0
);
9838 TCGv_i64 fp0
= tcg_temp_new_i64();
9840 gen_load_fpr64(ctx
, fp0
, fs
);
9841 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9842 gen_store_fpr64(ctx
, fp0
, fd
);
9843 tcg_temp_free_i64(fp0
);
9849 TCGv_i64 fp0
= tcg_temp_new_i64();
9850 TCGv_i64 fp1
= tcg_temp_new_i64();
9852 gen_load_fpr64(ctx
, fp0
, fs
);
9853 gen_load_fpr64(ctx
, fp1
, ft
);
9854 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9855 tcg_temp_free_i64(fp1
);
9856 gen_store_fpr64(ctx
, fp0
, fd
);
9857 tcg_temp_free_i64(fp0
);
9863 TCGv_i64 fp0
= tcg_temp_new_i64();
9864 TCGv_i64 fp1
= tcg_temp_new_i64();
9866 gen_load_fpr64(ctx
, fp0
, fs
);
9867 gen_load_fpr64(ctx
, fp1
, ft
);
9868 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9869 tcg_temp_free_i64(fp1
);
9870 gen_store_fpr64(ctx
, fp0
, fd
);
9871 tcg_temp_free_i64(fp0
);
9877 TCGv_i64 fp0
= tcg_temp_new_i64();
9878 TCGv_i64 fp1
= tcg_temp_new_i64();
9880 gen_load_fpr64(ctx
, fp0
, fs
);
9881 gen_load_fpr64(ctx
, fp1
, ft
);
9882 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9883 tcg_temp_free_i64(fp1
);
9884 gen_store_fpr64(ctx
, fp0
, fd
);
9885 tcg_temp_free_i64(fp0
);
9891 TCGv_i64 fp0
= tcg_temp_new_i64();
9893 gen_load_fpr64(ctx
, fp0
, fs
);
9894 gen_helper_float_abs_ps(fp0
, fp0
);
9895 gen_store_fpr64(ctx
, fp0
, fd
);
9896 tcg_temp_free_i64(fp0
);
9902 TCGv_i64 fp0
= tcg_temp_new_i64();
9904 gen_load_fpr64(ctx
, fp0
, fs
);
9905 gen_store_fpr64(ctx
, fp0
, fd
);
9906 tcg_temp_free_i64(fp0
);
9912 TCGv_i64 fp0
= tcg_temp_new_i64();
9914 gen_load_fpr64(ctx
, fp0
, fs
);
9915 gen_helper_float_chs_ps(fp0
, fp0
);
9916 gen_store_fpr64(ctx
, fp0
, fd
);
9917 tcg_temp_free_i64(fp0
);
9922 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9927 TCGLabel
*l1
= gen_new_label();
9931 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9932 fp0
= tcg_temp_new_i64();
9933 gen_load_fpr64(ctx
, fp0
, fs
);
9934 gen_store_fpr64(ctx
, fp0
, fd
);
9935 tcg_temp_free_i64(fp0
);
9942 TCGLabel
*l1
= gen_new_label();
9946 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9947 fp0
= tcg_temp_new_i64();
9948 gen_load_fpr64(ctx
, fp0
, fs
);
9949 gen_store_fpr64(ctx
, fp0
, fd
);
9950 tcg_temp_free_i64(fp0
);
9958 TCGv_i64 fp0
= tcg_temp_new_i64();
9959 TCGv_i64 fp1
= tcg_temp_new_i64();
9961 gen_load_fpr64(ctx
, fp0
, ft
);
9962 gen_load_fpr64(ctx
, fp1
, fs
);
9963 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9964 tcg_temp_free_i64(fp1
);
9965 gen_store_fpr64(ctx
, fp0
, fd
);
9966 tcg_temp_free_i64(fp0
);
9972 TCGv_i64 fp0
= tcg_temp_new_i64();
9973 TCGv_i64 fp1
= tcg_temp_new_i64();
9975 gen_load_fpr64(ctx
, fp0
, ft
);
9976 gen_load_fpr64(ctx
, fp1
, fs
);
9977 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9978 tcg_temp_free_i64(fp1
);
9979 gen_store_fpr64(ctx
, fp0
, fd
);
9980 tcg_temp_free_i64(fp0
);
9986 TCGv_i64 fp0
= tcg_temp_new_i64();
9987 TCGv_i64 fp1
= tcg_temp_new_i64();
9989 gen_load_fpr64(ctx
, fp0
, fs
);
9990 gen_load_fpr64(ctx
, fp1
, ft
);
9991 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9992 tcg_temp_free_i64(fp1
);
9993 gen_store_fpr64(ctx
, fp0
, fd
);
9994 tcg_temp_free_i64(fp0
);
10000 TCGv_i64 fp0
= tcg_temp_new_i64();
10002 gen_load_fpr64(ctx
, fp0
, fs
);
10003 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10004 gen_store_fpr64(ctx
, fp0
, fd
);
10005 tcg_temp_free_i64(fp0
);
10008 case OPC_RSQRT1_PS
:
10011 TCGv_i64 fp0
= tcg_temp_new_i64();
10013 gen_load_fpr64(ctx
, fp0
, fs
);
10014 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10015 gen_store_fpr64(ctx
, fp0
, fd
);
10016 tcg_temp_free_i64(fp0
);
10019 case OPC_RSQRT2_PS
:
10022 TCGv_i64 fp0
= tcg_temp_new_i64();
10023 TCGv_i64 fp1
= tcg_temp_new_i64();
10025 gen_load_fpr64(ctx
, fp0
, fs
);
10026 gen_load_fpr64(ctx
, fp1
, ft
);
10027 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10028 tcg_temp_free_i64(fp1
);
10029 gen_store_fpr64(ctx
, fp0
, fd
);
10030 tcg_temp_free_i64(fp0
);
10034 check_cp1_64bitmode(ctx
);
10036 TCGv_i32 fp0
= tcg_temp_new_i32();
10038 gen_load_fpr32h(ctx
, fp0
, fs
);
10039 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10040 gen_store_fpr32(ctx
, fp0
, fd
);
10041 tcg_temp_free_i32(fp0
);
10044 case OPC_CVT_PW_PS
:
10047 TCGv_i64 fp0
= tcg_temp_new_i64();
10049 gen_load_fpr64(ctx
, fp0
, fs
);
10050 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10051 gen_store_fpr64(ctx
, fp0
, fd
);
10052 tcg_temp_free_i64(fp0
);
10056 check_cp1_64bitmode(ctx
);
10058 TCGv_i32 fp0
= tcg_temp_new_i32();
10060 gen_load_fpr32(ctx
, fp0
, fs
);
10061 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10062 gen_store_fpr32(ctx
, fp0
, fd
);
10063 tcg_temp_free_i32(fp0
);
10069 TCGv_i32 fp0
= tcg_temp_new_i32();
10070 TCGv_i32 fp1
= tcg_temp_new_i32();
10072 gen_load_fpr32(ctx
, fp0
, fs
);
10073 gen_load_fpr32(ctx
, fp1
, ft
);
10074 gen_store_fpr32h(ctx
, fp0
, fd
);
10075 gen_store_fpr32(ctx
, fp1
, fd
);
10076 tcg_temp_free_i32(fp0
);
10077 tcg_temp_free_i32(fp1
);
10083 TCGv_i32 fp0
= tcg_temp_new_i32();
10084 TCGv_i32 fp1
= tcg_temp_new_i32();
10086 gen_load_fpr32(ctx
, fp0
, fs
);
10087 gen_load_fpr32h(ctx
, fp1
, ft
);
10088 gen_store_fpr32(ctx
, fp1
, fd
);
10089 gen_store_fpr32h(ctx
, fp0
, fd
);
10090 tcg_temp_free_i32(fp0
);
10091 tcg_temp_free_i32(fp1
);
10097 TCGv_i32 fp0
= tcg_temp_new_i32();
10098 TCGv_i32 fp1
= tcg_temp_new_i32();
10100 gen_load_fpr32h(ctx
, fp0
, fs
);
10101 gen_load_fpr32(ctx
, fp1
, ft
);
10102 gen_store_fpr32(ctx
, fp1
, fd
);
10103 gen_store_fpr32h(ctx
, fp0
, fd
);
10104 tcg_temp_free_i32(fp0
);
10105 tcg_temp_free_i32(fp1
);
10111 TCGv_i32 fp0
= tcg_temp_new_i32();
10112 TCGv_i32 fp1
= tcg_temp_new_i32();
10114 gen_load_fpr32h(ctx
, fp0
, fs
);
10115 gen_load_fpr32h(ctx
, fp1
, ft
);
10116 gen_store_fpr32(ctx
, fp1
, fd
);
10117 gen_store_fpr32h(ctx
, fp0
, fd
);
10118 tcg_temp_free_i32(fp0
);
10119 tcg_temp_free_i32(fp1
);
10123 case OPC_CMP_UN_PS
:
10124 case OPC_CMP_EQ_PS
:
10125 case OPC_CMP_UEQ_PS
:
10126 case OPC_CMP_OLT_PS
:
10127 case OPC_CMP_ULT_PS
:
10128 case OPC_CMP_OLE_PS
:
10129 case OPC_CMP_ULE_PS
:
10130 case OPC_CMP_SF_PS
:
10131 case OPC_CMP_NGLE_PS
:
10132 case OPC_CMP_SEQ_PS
:
10133 case OPC_CMP_NGL_PS
:
10134 case OPC_CMP_LT_PS
:
10135 case OPC_CMP_NGE_PS
:
10136 case OPC_CMP_LE_PS
:
10137 case OPC_CMP_NGT_PS
:
10138 if (ctx
->opcode
& (1 << 6)) {
10139 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10141 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10145 MIPS_INVAL("farith");
10146 generate_exception_end(ctx
, EXCP_RI
);
10151 /* Coprocessor 3 (FPU) */
10152 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
10153 int fd, int fs, int base, int index)
10155 TCGv t0 = tcg_temp_new();
10158 gen_load_gpr(t0, index);
10159 } else if (index == 0) {
10160 gen_load_gpr(t0, base);
10162 gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
10164 /* Don't do NOP if destination is zero: we must perform the actual
10170 TCGv_i32 fp0 = tcg_temp_new_i32();
10172 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
10173 tcg_gen_trunc_tl_i32(fp0, t0);
10174 gen_store_fpr32(ctx, fp0, fd);
10175 tcg_temp_free_i32(fp0);
10180 check_cp1_registers(ctx, fd);
10182 TCGv_i64 fp0 = tcg_temp_new_i64();
10183 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10184 gen_store_fpr64(ctx, fp0, fd);
10185 tcg_temp_free_i64(fp0);
10189 check_cp1_64bitmode(ctx);
10190 tcg_gen_andi_tl(t0, t0, ~0x7);
10192 TCGv_i64 fp0 = tcg_temp_new_i64();
10194 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10195 gen_store_fpr64(ctx, fp0, fd);
10196 tcg_temp_free_i64(fp0);
10202 TCGv_i32 fp0 = tcg_temp_new_i32();
10203 gen_load_fpr32(ctx, fp0, fs);
10204 tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
10205 tcg_temp_free_i32(fp0);
10210 check_cp1_registers(ctx, fs);
10212 TCGv_i64 fp0 = tcg_temp_new_i64();
10213 gen_load_fpr64(ctx, fp0, fs);
10214 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10215 tcg_temp_free_i64(fp0);
10219 check_cp1_64bitmode(ctx);
10220 tcg_gen_andi_tl(t0, t0, ~0x7);
10222 TCGv_i64 fp0 = tcg_temp_new_i64();
10223 gen_load_fpr64(ctx, fp0, fs);
10224 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10225 tcg_temp_free_i64(fp0);
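/* Descriptive note (added): gen_flt3_ldst() above handles the indexed COP1X
 * loads and stores (LWXC1/LDXC1/LUXC1 and their store counterparts).  The
 * effective address is base + index, with the zero-register short cuts
 * visible at the top of the function, and the "unaligned" LUXC1/SUXC1 forms
 * force 8-byte alignment by clearing the low address bits with
 * tcg_gen_andi_tl(t0, t0, ~0x7).
 */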
10232 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
10233 int fd, int fr, int fs, int ft)
10239 TCGv t0
= tcg_temp_local_new();
10240 TCGv_i32 fp
= tcg_temp_new_i32();
10241 TCGv_i32 fph
= tcg_temp_new_i32();
10242 TCGLabel
*l1
= gen_new_label();
10243 TCGLabel
*l2
= gen_new_label();
10245 gen_load_gpr(t0
, fr
);
10246 tcg_gen_andi_tl(t0
, t0
, 0x7);
10248 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10249 gen_load_fpr32(ctx
, fp
, fs
);
10250 gen_load_fpr32h(ctx
, fph
, fs
);
10251 gen_store_fpr32(ctx
, fp
, fd
);
10252 gen_store_fpr32h(ctx
, fph
, fd
);
10255 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10257 #ifdef TARGET_WORDS_BIGENDIAN
10258 gen_load_fpr32(ctx
, fp
, fs
);
10259 gen_load_fpr32h(ctx
, fph
, ft
);
10260 gen_store_fpr32h(ctx
, fp
, fd
);
10261 gen_store_fpr32(ctx
, fph
, fd
);
10263 gen_load_fpr32h(ctx
, fph
, fs
);
10264 gen_load_fpr32(ctx
, fp
, ft
);
10265 gen_store_fpr32(ctx
, fph
, fd
);
10266 gen_store_fpr32h(ctx
, fp
, fd
);
10269 tcg_temp_free_i32(fp
);
10270 tcg_temp_free_i32(fph
);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv_i32 fp1 = tcg_temp_new_i32();
        TCGv_i32 fp2 = tcg_temp_new_i32();

        gen_load_fpr32(ctx, fp0, fs);
        gen_load_fpr32(ctx, fp1, ft);
        gen_load_fpr32(ctx, fp2, fr);
        gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i32(fp0);
        tcg_temp_free_i32(fp1);
        gen_store_fpr32(ctx, fp2, fd);
        tcg_temp_free_i32(fp2);

        check_cp1_registers(ctx, fd | fs | ft | fr);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        TCGv_i64 fp1 = tcg_temp_new_i64();
        TCGv_i64 fp2 = tcg_temp_new_i64();

        gen_load_fpr64(ctx, fp0, fs);
        gen_load_fpr64(ctx, fp1, ft);
        gen_load_fpr64(ctx, fp2, fr);
        gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
        tcg_temp_free_i64(fp0);
        tcg_temp_free_i64(fp1);
        gen_store_fpr64(ctx, fp2, fd);
        tcg_temp_free_i64(fp2);
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
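/* RDHWR: read one of the user-visible hardware registers (CPU number,
   SYNCI step, cycle counter, CC resolution, performance counters, XNP,
   UserLocal) into a GPR, raising RI for unimplemented selections. */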
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();
    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
10634 /* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }
10652 /* Load needed operands and calculate btarget */
10654 /* compact branch */
10655 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10656 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10657 gen_load_gpr(t0
, rs
);
10658 gen_load_gpr(t1
, rt
);
10660 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10661 if (rs
<= rt
&& rs
== 0) {
10662 /* OPC_BEQZALC, OPC_BNEZALC */
10663 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10666 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10667 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10668 gen_load_gpr(t0
, rs
);
10669 gen_load_gpr(t1
, rt
);
10671 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10673 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10674 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10675 if (rs
== 0 || rs
== rt
) {
10676 /* OPC_BLEZALC, OPC_BGEZALC */
10677 /* OPC_BGTZALC, OPC_BLTZALC */
10678 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10680 gen_load_gpr(t0
, rs
);
10681 gen_load_gpr(t1
, rt
);
10683 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10687 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10692 /* OPC_BEQZC, OPC_BNEZC */
10693 gen_load_gpr(t0
, rs
);
10695 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10697 /* OPC_JIC, OPC_JIALC */
10698 TCGv tbase
= tcg_temp_new();
10699 TCGv toffset
= tcg_temp_new();
10701 gen_load_gpr(tbase
, rt
);
10702 tcg_gen_movi_tl(toffset
, offset
);
10703 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10704 tcg_temp_free(tbase
);
10705 tcg_temp_free(toffset
);
10709 MIPS_INVAL("Compact branch/jump");
10710 generate_exception_end(ctx
, EXCP_RI
);
    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
10743 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10744 if (rs
== 0 && rt
!= 0) {
10746 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10747 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10749 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10752 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10755 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10756 if (rs
== 0 && rt
!= 0) {
10758 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10759 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10761 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10764 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10767 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10768 if (rs
== 0 && rt
!= 0) {
10770 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10771 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10773 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10776 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10779 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10780 if (rs
== 0 && rt
!= 0) {
10782 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10783 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10785 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10788 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            /* OPC_BOVC, OPC_BNVC */
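            /* For BOVC/BNVC, t4 is computed below so that it is non-zero
             * exactly when the 32-bit signed addition of rs and rt overflows
             * (or when either input is not a properly sign-extended 32-bit
             * value). */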
            TCGv t2 = tcg_temp_new();
            TCGv t3 = tcg_temp_new();
            TCGv t4 = tcg_temp_new();
            TCGv input_overflow = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            tcg_gen_ext32s_tl(t2, t0);
            tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
            tcg_gen_ext32s_tl(t3, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
            tcg_gen_or_tl(input_overflow, input_overflow, t4);

            tcg_gen_add_tl(t4, t2, t3);
            tcg_gen_ext32s_tl(t4, t4);
            tcg_gen_xor_tl(t2, t2, t3);
            tcg_gen_xor_tl(t3, t4, t3);
            tcg_gen_andc_tl(t2, t3, t2);
            tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
            tcg_gen_or_tl(t4, t4, input_overflow);
            if (opc == OPC_BOVC) {
                /* OPC_BOVC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
            } else {
                /* OPC_BNVC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
            }
            tcg_temp_free(input_overflow);
10826 } else if (rs
< rt
&& rs
== 0) {
10827 /* OPC_BEQZALC, OPC_BNEZALC */
10828 if (opc
== OPC_BEQZALC
) {
10830 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10833 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10836 /* OPC_BEQC, OPC_BNEC */
10837 if (opc
== OPC_BEQC
) {
10839 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10842 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10847 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10850 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10853 MIPS_INVAL("Compact conditional branch/jump");
10854 generate_exception_end(ctx
, EXCP_RI
);
10858 /* Generating branch here as compact branches don't have delay slot */
10859 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10862 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10870 /* ISA extensions (ASEs) */
10871 /* MIPS16 extension to MIPS32 */
10873 /* MIPS16 major opcodes */
10875 M16_OPC_ADDIUSP
= 0x00,
10876 M16_OPC_ADDIUPC
= 0x01,
10878 M16_OPC_JAL
= 0x03,
10879 M16_OPC_BEQZ
= 0x04,
10880 M16_OPC_BNEQZ
= 0x05,
10881 M16_OPC_SHIFT
= 0x06,
10883 M16_OPC_RRIA
= 0x08,
10884 M16_OPC_ADDIU8
= 0x09,
10885 M16_OPC_SLTI
= 0x0a,
10886 M16_OPC_SLTIU
= 0x0b,
10889 M16_OPC_CMPI
= 0x0e,
10893 M16_OPC_LWSP
= 0x12,
10895 M16_OPC_LBU
= 0x14,
10896 M16_OPC_LHU
= 0x15,
10897 M16_OPC_LWPC
= 0x16,
10898 M16_OPC_LWU
= 0x17,
10901 M16_OPC_SWSP
= 0x1a,
10903 M16_OPC_RRR
= 0x1c,
10905 M16_OPC_EXTEND
= 0x1e,
10909 /* I8 funct field */
10928 /* RR funct field */
10962 /* I64 funct field */
10970 I64_DADDIUPC
= 0x6,
10974 /* RR ry field for CNVT */
10976 RR_RY_CNVT_ZEB
= 0x0,
10977 RR_RY_CNVT_ZEH
= 0x1,
10978 RR_RY_CNVT_ZEW
= 0x2,
10979 RR_RY_CNVT_SEB
= 0x4,
10980 RR_RY_CNVT_SEH
= 0x5,
10981 RR_RY_CNVT_SEW
= 0x6,
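/* MIPS16 instructions encode only a 3-bit register field; xlat() maps it to
   the architectural register numbers $16, $17 and $2..$7. */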
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
10991 static void gen_mips16_save (DisasContext
*ctx
,
10992 int xsregs
, int aregs
,
10993 int do_ra
, int do_s0
, int do_s1
,
10996 TCGv t0
= tcg_temp_new();
10997 TCGv t1
= tcg_temp_new();
10998 TCGv t2
= tcg_temp_new();
11028 generate_exception_end(ctx
, EXCP_RI
);
11034 gen_base_offset_addr(ctx
, t0
, 29, 12);
11035 gen_load_gpr(t1
, 7);
11036 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11039 gen_base_offset_addr(ctx
, t0
, 29, 8);
11040 gen_load_gpr(t1
, 6);
11041 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11044 gen_base_offset_addr(ctx
, t0
, 29, 4);
11045 gen_load_gpr(t1
, 5);
11046 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11049 gen_base_offset_addr(ctx
, t0
, 29, 0);
11050 gen_load_gpr(t1
, 4);
11051 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11054 gen_load_gpr(t0
, 29);
#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)

    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);
11124 generate_exception_end(ctx
, EXCP_RI
);
11140 #undef DECR_AND_STORE
11142 tcg_gen_movi_tl(t2
, -framesize
);
11143 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11149 static void gen_mips16_restore (DisasContext
*ctx
,
11150 int xsregs
, int aregs
,
11151 int do_ra
, int do_s0
, int do_s1
,
11155 TCGv t0
= tcg_temp_new();
11156 TCGv t1
= tcg_temp_new();
11157 TCGv t2
= tcg_temp_new();
11159 tcg_gen_movi_tl(t2
, framesize
);
11160 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
#define DECR_AND_LOAD(reg) do {                                  \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);       \
        gen_store_gpr(t1, reg);                                  \
    } while (0)
11230 generate_exception_end(ctx
, EXCP_RI
);
11246 #undef DECR_AND_LOAD
11248 tcg_gen_movi_tl(t2
, framesize
);
11249 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11255 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11256 int is_64_bit
, int extended
)
11260 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11261 generate_exception_end(ctx
, EXCP_RI
);
11265 t0
= tcg_temp_new();
11267 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11268 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11270 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11276 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11279 TCGv_i32 t0
= tcg_const_i32(op
);
11280 TCGv t1
= tcg_temp_new();
11281 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11282 gen_helper_cache(cpu_env
, t1
, t0
);
11285 #if defined(TARGET_MIPS64)
11286 static void decode_i64_mips16 (DisasContext
*ctx
,
11287 int ry
, int funct
, int16_t offset
,
11292 check_insn(ctx
, ISA_MIPS3
);
11293 check_mips_64(ctx
);
11294 offset
= extended
? offset
: offset
<< 3;
11295 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11298 check_insn(ctx
, ISA_MIPS3
);
11299 check_mips_64(ctx
);
11300 offset
= extended
? offset
: offset
<< 3;
11301 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11304 check_insn(ctx
, ISA_MIPS3
);
11305 check_mips_64(ctx
);
11306 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11307 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11310 check_insn(ctx
, ISA_MIPS3
);
11311 check_mips_64(ctx
);
11312 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11313 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11316 check_insn(ctx
, ISA_MIPS3
);
11317 check_mips_64(ctx
);
11318 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11319 generate_exception_end(ctx
, EXCP_RI
);
11321 offset
= extended
? offset
: offset
<< 3;
11322 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11326 check_insn(ctx
, ISA_MIPS3
);
11327 check_mips_64(ctx
);
11328 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11329 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11332 check_insn(ctx
, ISA_MIPS3
);
11333 check_mips_64(ctx
);
11334 offset
= extended
? offset
: offset
<< 2;
11335 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11338 check_insn(ctx
, ISA_MIPS3
);
11339 check_mips_64(ctx
);
11340 offset
= extended
? offset
: offset
<< 2;
11341 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
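/* An EXTEND prefix contributes extra immediate bits to the following 16-bit
   instruction; the two halfwords are combined into ctx->opcode and decoded
   together here. */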
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
{
    int extend = cpu_lduw_code(env, ctx->pc + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));
    /* The extended opcodes cleverly reuse the opcodes from their 16-bit
       counterparts: extend and translate.  */
    switch (op) {
11366 case M16_OPC_ADDIUSP
:
11367 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11369 case M16_OPC_ADDIUPC
:
11370 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11373 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11374 /* No delay slot, so just process as a normal instruction */
11377 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11378 /* No delay slot, so just process as a normal instruction */
11380 case M16_OPC_BNEQZ
:
11381 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11382 /* No delay slot, so just process as a normal instruction */
11384 case M16_OPC_SHIFT
:
11385 switch (ctx
->opcode
& 0x3) {
11387 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11390 #if defined(TARGET_MIPS64)
11391 check_mips_64(ctx
);
11392 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11394 generate_exception_end(ctx
, EXCP_RI
);
11398 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11401 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11405 #if defined(TARGET_MIPS64)
11407 check_insn(ctx
, ISA_MIPS3
);
11408 check_mips_64(ctx
);
11409 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11413 imm
= ctx
->opcode
& 0xf;
11414 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11415 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11416 imm
= (int16_t) (imm
<< 1) >> 1;
11417 if ((ctx
->opcode
>> 4) & 0x1) {
11418 #if defined(TARGET_MIPS64)
11419 check_mips_64(ctx
);
11420 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11422 generate_exception_end(ctx
, EXCP_RI
);
11425 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11428 case M16_OPC_ADDIU8
:
11429 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11432 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11434 case M16_OPC_SLTIU
:
11435 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11440 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11443 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11446 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11449 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11452 check_insn(ctx
, ISA_MIPS32
);
11454 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11455 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11456 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11457 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11458 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11459 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11460 | (ctx
->opcode
& 0xf)) << 3;
11462 if (ctx
->opcode
& (1 << 7)) {
11463 gen_mips16_save(ctx
, xsregs
, aregs
,
11464 do_ra
, do_s0
, do_s1
,
11467 gen_mips16_restore(ctx
, xsregs
, aregs
,
11468 do_ra
, do_s0
, do_s1
,
11474 generate_exception_end(ctx
, EXCP_RI
);
11479 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11482 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11484 #if defined(TARGET_MIPS64)
11486 check_insn(ctx
, ISA_MIPS3
);
11487 check_mips_64(ctx
);
11488 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11492 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11495 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11498 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11501 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11504 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11507 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11510 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11512 #if defined(TARGET_MIPS64)
11514 check_insn(ctx
, ISA_MIPS3
);
11515 check_mips_64(ctx
);
11516 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11520 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11523 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11526 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11529 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11531 #if defined(TARGET_MIPS64)
11533 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11537 generate_exception_end(ctx
, EXCP_RI
);
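/* SDBBP with code 1 is treated as a UHI (Unified Hosting Interface)
   semihosting call when semihosting is enabled. */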
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
{
    int op, cnvt_op, op1, offset;

    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 2) & 0x7;
    sa = sa == 0 ? 8 : sa;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    cnvt_op = (ctx->opcode >> 5) & 0x7;
    ry = xlat((ctx->opcode >> 5) & 0x7);
    op1 = offset = ctx->opcode & 0x1f;
11572 case M16_OPC_ADDIUSP
:
11574 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11576 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11579 case M16_OPC_ADDIUPC
:
11580 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11583 offset
= (ctx
->opcode
& 0x7ff) << 1;
11584 offset
= (int16_t)(offset
<< 4) >> 4;
11585 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11586 /* No delay slot, so just process as a normal instruction */
11589 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11590 offset
= (((ctx
->opcode
& 0x1f) << 21)
11591 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11593 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11594 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11598 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11599 ((int8_t)ctx
->opcode
) << 1, 0);
11600 /* No delay slot, so just process as a normal instruction */
11602 case M16_OPC_BNEQZ
:
11603 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11604 ((int8_t)ctx
->opcode
) << 1, 0);
11605 /* No delay slot, so just process as a normal instruction */
11607 case M16_OPC_SHIFT
:
11608 switch (ctx
->opcode
& 0x3) {
11610 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11613 #if defined(TARGET_MIPS64)
11614 check_insn(ctx
, ISA_MIPS3
);
11615 check_mips_64(ctx
);
11616 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11618 generate_exception_end(ctx
, EXCP_RI
);
11622 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11625 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11629 #if defined(TARGET_MIPS64)
11631 check_insn(ctx
, ISA_MIPS3
);
11632 check_mips_64(ctx
);
11633 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11638 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11640 if ((ctx
->opcode
>> 4) & 1) {
11641 #if defined(TARGET_MIPS64)
11642 check_insn(ctx
, ISA_MIPS3
);
11643 check_mips_64(ctx
);
11644 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11646 generate_exception_end(ctx
, EXCP_RI
);
11649 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11653 case M16_OPC_ADDIU8
:
11655 int16_t imm
= (int8_t) ctx
->opcode
;
11657 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11662 int16_t imm
= (uint8_t) ctx
->opcode
;
11663 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11666 case M16_OPC_SLTIU
:
11668 int16_t imm
= (uint8_t) ctx
->opcode
;
11669 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11676 funct
= (ctx
->opcode
>> 8) & 0x7;
11679 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11680 ((int8_t)ctx
->opcode
) << 1, 0);
11683 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11684 ((int8_t)ctx
->opcode
) << 1, 0);
11687 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11690 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11691 ((int8_t)ctx
->opcode
) << 3);
11694 check_insn(ctx
, ISA_MIPS32
);
11696 int do_ra
= ctx
->opcode
& (1 << 6);
11697 int do_s0
= ctx
->opcode
& (1 << 5);
11698 int do_s1
= ctx
->opcode
& (1 << 4);
11699 int framesize
= ctx
->opcode
& 0xf;
11701 if (framesize
== 0) {
11704 framesize
= framesize
<< 3;
11707 if (ctx
->opcode
& (1 << 7)) {
11708 gen_mips16_save(ctx
, 0, 0,
11709 do_ra
, do_s0
, do_s1
, framesize
);
11711 gen_mips16_restore(ctx
, 0, 0,
11712 do_ra
, do_s0
, do_s1
, framesize
);
11718 int rz
= xlat(ctx
->opcode
& 0x7);
11720 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11721 ((ctx
->opcode
>> 5) & 0x7);
11722 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11726 reg32
= ctx
->opcode
& 0x1f;
11727 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11730 generate_exception_end(ctx
, EXCP_RI
);
11737 int16_t imm
= (uint8_t) ctx
->opcode
;
11739 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11744 int16_t imm
= (uint8_t) ctx
->opcode
;
11745 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11748 #if defined(TARGET_MIPS64)
11750 check_insn(ctx
, ISA_MIPS3
);
11751 check_mips_64(ctx
);
11752 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11756 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11759 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11762 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11765 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11768 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11771 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11774 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11776 #if defined (TARGET_MIPS64)
11778 check_insn(ctx
, ISA_MIPS3
);
11779 check_mips_64(ctx
);
11780 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11784 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11787 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11790 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11793 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11797 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11800 switch (ctx
->opcode
& 0x3) {
11802 mips32_op
= OPC_ADDU
;
11805 mips32_op
= OPC_SUBU
;
11807 #if defined(TARGET_MIPS64)
11809 mips32_op
= OPC_DADDU
;
11810 check_insn(ctx
, ISA_MIPS3
);
11811 check_mips_64(ctx
);
11814 mips32_op
= OPC_DSUBU
;
11815 check_insn(ctx
, ISA_MIPS3
);
11816 check_mips_64(ctx
);
11820 generate_exception_end(ctx
, EXCP_RI
);
11824 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11833 int nd
= (ctx
->opcode
>> 7) & 0x1;
11834 int link
= (ctx
->opcode
>> 6) & 0x1;
11835 int ra
= (ctx
->opcode
>> 5) & 0x1;
11838 check_insn(ctx
, ISA_MIPS32
);
11847 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11852 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11853 gen_helper_do_semihosting(cpu_env
);
11855 /* XXX: not clear which exception should be raised
11856 * when in debug mode...
11858 check_insn(ctx
, ISA_MIPS32
);
11859 generate_exception_end(ctx
, EXCP_DBp
);
11863 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11866 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11869 generate_exception_end(ctx
, EXCP_BREAK
);
11872 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11875 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11878 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11880 #if defined (TARGET_MIPS64)
11882 check_insn(ctx
, ISA_MIPS3
);
11883 check_mips_64(ctx
);
11884 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11888 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11891 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11894 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11897 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11900 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11903 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11906 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11909 check_insn(ctx
, ISA_MIPS32
);
11911 case RR_RY_CNVT_ZEB
:
11912 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11914 case RR_RY_CNVT_ZEH
:
11915 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11917 case RR_RY_CNVT_SEB
:
11918 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11920 case RR_RY_CNVT_SEH
:
11921 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11923 #if defined (TARGET_MIPS64)
11924 case RR_RY_CNVT_ZEW
:
11925 check_insn(ctx
, ISA_MIPS64
);
11926 check_mips_64(ctx
);
11927 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11929 case RR_RY_CNVT_SEW
:
11930 check_insn(ctx
, ISA_MIPS64
);
11931 check_mips_64(ctx
);
11932 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11936 generate_exception_end(ctx
, EXCP_RI
);
11941 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11943 #if defined (TARGET_MIPS64)
11945 check_insn(ctx
, ISA_MIPS3
);
11946 check_mips_64(ctx
);
11947 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11950 check_insn(ctx
, ISA_MIPS3
);
11951 check_mips_64(ctx
);
11952 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11955 check_insn(ctx
, ISA_MIPS3
);
11956 check_mips_64(ctx
);
11957 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11960 check_insn(ctx
, ISA_MIPS3
);
11961 check_mips_64(ctx
);
11962 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11966 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11969 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11972 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11975 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11977 #if defined (TARGET_MIPS64)
11979 check_insn(ctx
, ISA_MIPS3
);
11980 check_mips_64(ctx
);
11981 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11984 check_insn(ctx
, ISA_MIPS3
);
11985 check_mips_64(ctx
);
11986 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11989 check_insn(ctx
, ISA_MIPS3
);
11990 check_mips_64(ctx
);
11991 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11994 check_insn(ctx
, ISA_MIPS3
);
11995 check_mips_64(ctx
);
11996 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12000 generate_exception_end(ctx
, EXCP_RI
);
12004 case M16_OPC_EXTEND
:
12005 decode_extended_mips16_opc(env
, ctx
);
12008 #if defined(TARGET_MIPS64)
12010 funct
= (ctx
->opcode
>> 8) & 0x7;
12011 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12015 generate_exception_end(ctx
, EXCP_RI
);
12022 /* microMIPS extension to MIPS32/MIPS64 */
12025 * microMIPS32/microMIPS64 major opcodes
12027 * 1. MIPS Architecture for Programmers Volume II-B:
12028 * The microMIPS32 Instruction Set (Revision 3.05)
12030 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12032 * 2. MIPS Architecture For Programmers Volume II-A:
12033 * The MIPS64 Instruction Set (Revision 3.51)
12063 POOL32S
= 0x16, /* MIPS64 */
12064 DADDIU32
= 0x17, /* MIPS64 */
12093 /* 0x29 is reserved */
12106 /* 0x31 is reserved */
12119 SD32
= 0x36, /* MIPS64 */
12120 LD32
= 0x37, /* MIPS64 */
12122 /* 0x39 is reserved */
12138 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12148 /* POOL32A encoding of minor opcode field */
12151 /* These opcodes are distinguished only by bits 9..6; those bits are
12152 * what are recorded below. */
12189 /* The following can be distinguished by their lower 6 bits. */
12199 /* POOL32AXF encoding of minor opcode field extension */
12202 * 1. MIPS Architecture for Programmers Volume II-B:
12203 * The microMIPS32 Instruction Set (Revision 3.05)
12205 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12207 * 2. MIPS Architecture for Programmers VolumeIV-e:
12208 * The MIPS DSP Application-Specific Extension
12209 * to the microMIPS32 Architecture (Revision 2.34)
12211 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12226 /* begin of microMIPS32 DSP */
12228 /* bits 13..12 for 0x01 */
12234 /* bits 13..12 for 0x2a */
12240 /* bits 13..12 for 0x32 */
12244 /* end of microMIPS32 DSP */
12246 /* bits 15..12 for 0x2c */
12263 /* bits 15..12 for 0x34 */
12271 /* bits 15..12 for 0x3c */
12273 JR
= 0x0, /* alias */
12281 /* bits 15..12 for 0x05 */
12285 /* bits 15..12 for 0x0d */
12297 /* bits 15..12 for 0x15 */
12303 /* bits 15..12 for 0x1d */
12307 /* bits 15..12 for 0x2d */
12312 /* bits 15..12 for 0x35 */
12319 /* POOL32B encoding of minor opcode field (bits 15..12) */
12335 /* POOL32C encoding of minor opcode field (bits 15..12) */
12343 /* 0xa is reserved */
12350 /* 0x6 is reserved */
12356 /* POOL32F encoding of minor opcode field (bits 5..0) */
12359 /* These are the bit 7..6 values */
12368 /* These are the bit 8..6 values */
12393 MOVZ_FMT_05
= 0x05,
12427 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12434 /* POOL32Fxf encoding of minor opcode extension field */
12472 /* POOL32I encoding of minor opcode field (bits 25..21) */
12502 /* These overlap and are distinguished by bit16 of the instruction */
12511 /* POOL16A encoding of minor opcode field */
12518 /* POOL16B encoding of minor opcode field */
12525 /* POOL16C encoding of minor opcode field */
12545 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12565 /* POOL16D encoding of minor opcode field */
12572 /* POOL16E encoding of minor opcode field */
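/* microMIPS 16-bit formats use 3-bit register fields; mmreg() maps them to
   $16, $17 and $2..$7, while mmreg2() (used by stores) maps encoding 0 to
   $0 instead of $16. */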
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
               << (32-width)) \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
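/* Example: ADDIUS5 below keeps its 4-bit signed immediate at bit 1 of the
   opcode, so it is extracted with SIMM(ctx->opcode, 1, 4). */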
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
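/* LWM/SWM (and LDM/SDM on MIPS64): load or store the register set described
   by 'reglist' relative to base+offset, using the lwm/swm/ldm/sdm helpers. */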
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
12703 static void gen_pool16c_insn(DisasContext
*ctx
)
12705 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12706 int rs
= mmreg(ctx
->opcode
& 0x7);
12708 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12713 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12719 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12725 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12731 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12738 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12739 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12741 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12750 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12751 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12753 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12760 int reg
= ctx
->opcode
& 0x1f;
12762 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12768 int reg
= ctx
->opcode
& 0x1f;
12769 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12770 /* Let normal delay slot handling in our caller take us
12771 to the branch target. */
12776 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12777 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12781 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12782 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12786 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12790 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12793 generate_exception_end(ctx
, EXCP_BREAK
);
12796 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12797 gen_helper_do_semihosting(cpu_env
);
12799 /* XXX: not clear which exception should be raised
12800 * when in debug mode...
12802 check_insn(ctx
, ISA_MIPS32
);
12803 generate_exception_end(ctx
, EXCP_DBp
);
12806 case JRADDIUSP
+ 0:
12807 case JRADDIUSP
+ 1:
12809 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12810 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12811 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12812 /* Let normal delay slot handling in our caller take us
12813 to the branch target. */
12817 generate_exception_end(ctx
, EXCP_RI
);
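/* MOVEP copies two GPRs with a single 16-bit instruction; the destination,
   source and second-source fields are indices into fixed register tables. */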
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
12845 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12847 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12848 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12850 switch (ctx
->opcode
& 0xf) {
12852 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12855 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12859 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12860 int offset
= extract32(ctx
->opcode
, 4, 4);
12861 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12864 case R6_JRC16
: /* JRCADDIUSP */
12865 if ((ctx
->opcode
>> 4) & 1) {
12867 int imm
= extract32(ctx
->opcode
, 5, 5);
12868 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12869 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12872 int rs
= extract32(ctx
->opcode
, 5, 5);
12873 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12876 case MOVEP
... MOVEP_07
:
12877 case MOVEP_0C
... MOVEP_0F
:
12879 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12880 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12881 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12882 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12886 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12889 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12893 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12894 int offset
= extract32(ctx
->opcode
, 4, 4);
12895 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12898 case JALRC16
: /* BREAK16, SDBBP16 */
12899 switch (ctx
->opcode
& 0x3f) {
12901 case JALRC16
+ 0x20:
12903 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12908 generate_exception(ctx
, EXCP_BREAK
);
12912 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12913 gen_helper_do_semihosting(cpu_env
);
12915 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12916 generate_exception(ctx
, EXCP_RI
);
12918 generate_exception(ctx
, EXCP_DBp
);
12925 generate_exception(ctx
, EXCP_RI
);
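/* LWXS rd, index(base): load a sign-extended word from base + (index << 2). */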
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
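/* LWP/SWP (and LDP/SDP on MIPS64): load or store the register pair rd/rd+1;
   rd == 31 and use inside a branch delay slot are reserved instruction
   cases. */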
12950 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12951 int base
, int16_t offset
)
12955 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12956 generate_exception_end(ctx
, EXCP_RI
);
12960 t0
= tcg_temp_new();
12961 t1
= tcg_temp_new();
12963 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12968 generate_exception_end(ctx
, EXCP_RI
);
12971 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12972 gen_store_gpr(t1
, rd
);
12973 tcg_gen_movi_tl(t1
, 4);
12974 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12975 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12976 gen_store_gpr(t1
, rd
+1);
12979 gen_load_gpr(t1
, rd
);
12980 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12981 tcg_gen_movi_tl(t1
, 4);
12982 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12983 gen_load_gpr(t1
, rd
+1);
12984 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12986 #ifdef TARGET_MIPS64
12989 generate_exception_end(ctx
, EXCP_RI
);
12992 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12993 gen_store_gpr(t1
, rd
);
12994 tcg_gen_movi_tl(t1
, 8);
12995 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12996 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12997 gen_store_gpr(t1
, rd
+1);
13000 gen_load_gpr(t1
, rd
);
13001 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13002 tcg_gen_movi_tl(t1
, 8);
13003 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13004 gen_load_gpr(t1
, rd
+1);
13005 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13013 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13015 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13016 int minor
= (ctx
->opcode
>> 12) & 0xf;
13017 uint32_t mips32_op
;
13019 switch (extension
) {
13021 mips32_op
= OPC_TEQ
;
13024 mips32_op
= OPC_TGE
;
13027 mips32_op
= OPC_TGEU
;
13030 mips32_op
= OPC_TLT
;
13033 mips32_op
= OPC_TLTU
;
13036 mips32_op
= OPC_TNE
;
13038 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13040 #ifndef CONFIG_USER_ONLY
13043 check_cp0_enabled(ctx
);
13045 /* Treat as NOP. */
13048 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13052 check_cp0_enabled(ctx
);
13054 TCGv t0
= tcg_temp_new();
13056 gen_load_gpr(t0
, rt
);
13057 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13063 switch (minor
& 3) {
13065 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13068 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13071 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13074 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13077 goto pool32axf_invalid
;
13081 switch (minor
& 3) {
13083 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13086 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13089 goto pool32axf_invalid
;
13095 check_insn(ctx
, ISA_MIPS32R6
);
13096 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13099 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13102 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13105 mips32_op
= OPC_CLO
;
13108 mips32_op
= OPC_CLZ
;
13110 check_insn(ctx
, ISA_MIPS32
);
13111 gen_cl(ctx
, mips32_op
, rt
, rs
);
13114 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13115 gen_rdhwr(ctx
, rt
, rs
, 0);
13118 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13121 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13122 mips32_op
= OPC_MULT
;
13125 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13126 mips32_op
= OPC_MULTU
;
13129 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13130 mips32_op
= OPC_DIV
;
13133 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13134 mips32_op
= OPC_DIVU
;
13137 check_insn(ctx
, ISA_MIPS32
);
13138 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13141 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13142 mips32_op
= OPC_MADD
;
13145 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13146 mips32_op
= OPC_MADDU
;
13149 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13150 mips32_op
= OPC_MSUB
;
13153 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13154 mips32_op
= OPC_MSUBU
;
13156 check_insn(ctx
, ISA_MIPS32
);
13157 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13160 goto pool32axf_invalid
;
13171 generate_exception_err(ctx
, EXCP_CpU
, 2);
13174 goto pool32axf_invalid
;
13179 case JALR
: /* JALRC */
13180 case JALR_HB
: /* JALRC_HB */
13181 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13182 /* JALRC, JALRC_HB */
13183 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13185 /* JALR, JALR_HB */
13186 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13187 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13193 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13194 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13197 goto pool32axf_invalid
;
13203 check_cp0_enabled(ctx
);
13204 check_insn(ctx
, ISA_MIPS32R2
);
13205 gen_load_srsgpr(rs
, rt
);
13208 check_cp0_enabled(ctx
);
13209 check_insn(ctx
, ISA_MIPS32R2
);
13210 gen_store_srsgpr(rs
, rt
);
13213 goto pool32axf_invalid
;
13216 #ifndef CONFIG_USER_ONLY
13220 mips32_op
= OPC_TLBP
;
13223 mips32_op
= OPC_TLBR
;
13226 mips32_op
= OPC_TLBWI
;
13229 mips32_op
= OPC_TLBWR
;
13232 mips32_op
= OPC_TLBINV
;
13235 mips32_op
= OPC_TLBINVF
;
13238 mips32_op
= OPC_WAIT
;
13241 mips32_op
= OPC_DERET
;
13244 mips32_op
= OPC_ERET
;
13246 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13249 goto pool32axf_invalid
;
13255 check_cp0_enabled(ctx
);
13257 TCGv t0
= tcg_temp_new();
13259 save_cpu_state(ctx
, 1);
13260 gen_helper_di(t0
, cpu_env
);
13261 gen_store_gpr(t0
, rs
);
13262 /* Stop translation as we may have switched the execution mode */
13263 ctx
->bstate
= BS_STOP
;
13268 check_cp0_enabled(ctx
);
13270 TCGv t0
= tcg_temp_new();
13272 save_cpu_state(ctx
, 1);
13273 gen_helper_ei(t0
, cpu_env
);
13274 gen_store_gpr(t0
, rs
);
13275 /* Stop translation as we may have switched the execution mode */
13276 ctx
->bstate
= BS_STOP
;
13281 goto pool32axf_invalid
;
13291 generate_exception_end(ctx
, EXCP_SYSCALL
);
13294 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13295 gen_helper_do_semihosting(cpu_env
);
13297 check_insn(ctx
, ISA_MIPS32
);
13298 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13299 generate_exception_end(ctx
, EXCP_RI
);
13301 generate_exception_end(ctx
, EXCP_DBp
);
13306 goto pool32axf_invalid
;
13310 switch (minor
& 3) {
13312 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13315 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13318 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13321 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13324 goto pool32axf_invalid
;
13328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13331 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13334 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13337 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13340 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13343 goto pool32axf_invalid
;
13348 MIPS_INVAL("pool32axf");
13349 generate_exception_end(ctx
, EXCP_RI
);
13354 /* Values for microMIPS fmt field. Variable-width, depending on which
13355 formats the instruction supports. */
static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
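/* The POOL32Fxf minor opcode is matched on (format, opcode) pairs packed into
   a single value by the FLOAT_*_FMT and COND_FLOAT_MOV macros above. */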
13383 switch (extension
) {
13384 case FLOAT_1BIT_FMT(CFC1
, 0):
13385 mips32_op
= OPC_CFC1
;
13387 case FLOAT_1BIT_FMT(CTC1
, 0):
13388 mips32_op
= OPC_CTC1
;
13390 case FLOAT_1BIT_FMT(MFC1
, 0):
13391 mips32_op
= OPC_MFC1
;
13393 case FLOAT_1BIT_FMT(MTC1
, 0):
13394 mips32_op
= OPC_MTC1
;
13396 case FLOAT_1BIT_FMT(MFHC1
, 0):
13397 mips32_op
= OPC_MFHC1
;
13399 case FLOAT_1BIT_FMT(MTHC1
, 0):
13400 mips32_op
= OPC_MTHC1
;
13402 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13405 /* Reciprocal square root */
13406 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13407 mips32_op
= OPC_RSQRT_S
;
13409 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13410 mips32_op
= OPC_RSQRT_D
;
13414 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13415 mips32_op
= OPC_SQRT_S
;
13417 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13418 mips32_op
= OPC_SQRT_D
;
13422 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13423 mips32_op
= OPC_RECIP_S
;
13425 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13426 mips32_op
= OPC_RECIP_D
;
13430 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13431 mips32_op
= OPC_FLOOR_L_S
;
13433 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13434 mips32_op
= OPC_FLOOR_L_D
;
13436 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13437 mips32_op
= OPC_FLOOR_W_S
;
13439 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13440 mips32_op
= OPC_FLOOR_W_D
;
13444 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13445 mips32_op
= OPC_CEIL_L_S
;
13447 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13448 mips32_op
= OPC_CEIL_L_D
;
13450 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13451 mips32_op
= OPC_CEIL_W_S
;
13453 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13454 mips32_op
= OPC_CEIL_W_D
;
13458 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13459 mips32_op
= OPC_TRUNC_L_S
;
13461 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13462 mips32_op
= OPC_TRUNC_L_D
;
13464 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13465 mips32_op
= OPC_TRUNC_W_S
;
13467 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13468 mips32_op
= OPC_TRUNC_W_D
;
13472 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13473 mips32_op
= OPC_ROUND_L_S
;
13475 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13476 mips32_op
= OPC_ROUND_L_D
;
13478 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13479 mips32_op
= OPC_ROUND_W_S
;
13481 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13482 mips32_op
= OPC_ROUND_W_D
;
13485 /* Integer to floating-point conversion */
13486 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13487 mips32_op
= OPC_CVT_L_S
;
13489 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13490 mips32_op
= OPC_CVT_L_D
;
13492 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13493 mips32_op
= OPC_CVT_W_S
;
13495 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13496 mips32_op
= OPC_CVT_W_D
;
13499 /* Paired-foo conversions */
13500 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13501 mips32_op
= OPC_CVT_S_PL
;
13503 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13504 mips32_op
= OPC_CVT_S_PU
;
13506 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13507 mips32_op
= OPC_CVT_PW_PS
;
13509 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13510 mips32_op
= OPC_CVT_PS_PW
;
13513 /* Floating-point moves */
13514 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13515 mips32_op
= OPC_MOV_S
;
13517 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13518 mips32_op
= OPC_MOV_D
;
13520 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13521 mips32_op
= OPC_MOV_PS
;
13524 /* Absolute value */
13525 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13526 mips32_op
= OPC_ABS_S
;
13528 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13529 mips32_op
= OPC_ABS_D
;
13531 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13532 mips32_op
= OPC_ABS_PS
;
13536 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13537 mips32_op
= OPC_NEG_S
;
13539 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13540 mips32_op
= OPC_NEG_D
;
13542 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13543 mips32_op
= OPC_NEG_PS
;
13546 /* Reciprocal square root step */
13547 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13548 mips32_op
= OPC_RSQRT1_S
;
13550 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13551 mips32_op
= OPC_RSQRT1_D
;
13553 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13554 mips32_op
= OPC_RSQRT1_PS
;
    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
13568 /* Conversions from double */
13569 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13570 mips32_op
= OPC_CVT_D_S
;
13572 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13573 mips32_op
= OPC_CVT_D_W
;
13575 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13576 mips32_op
= OPC_CVT_D_L
;
13579 /* Conversions from single */
13580 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13581 mips32_op
= OPC_CVT_S_D
;
13583 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13584 mips32_op
= OPC_CVT_S_W
;
13586 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13587 mips32_op
= OPC_CVT_S_L
;
13589 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
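    /*
     * Illustrative note (added commentary, not from the original file): a
     * 32-bit microMIPS instruction is a pair of halfwords.  The first one is
     * already in ctx->opcode on entry; cpu_lduw_code() above fetches the
     * second and the shift/OR fuses them, so ctx->opcode now holds the full
     * 32-bit encoding.  The fields decoded above are plain bit ranges of
     * that word: op = bits 31..26, rt = bits 25..21, rs = bits 20..16,
     * rd = bits 15..11, rr = bits 10..6, imm = the sign-extended low
     * halfword.
     */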
        minor = ctx->opcode & 0x3f;
            minor = (ctx->opcode >> 6) & 0xf;
13649 mips32_op
= OPC_SLL
;
13652 mips32_op
= OPC_SRA
;
13655 mips32_op
= OPC_SRL
;
13658 mips32_op
= OPC_ROTR
;
13660 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13663 check_insn(ctx
, ISA_MIPS32R6
);
13664 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13667 check_insn(ctx
, ISA_MIPS32R6
);
13668 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13671 check_insn(ctx
, ISA_MIPS32R6
);
13672 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13675 goto pool32a_invalid
;
13679 minor
= (ctx
->opcode
>> 6) & 0xf;
13683 mips32_op
= OPC_ADD
;
13686 mips32_op
= OPC_ADDU
;
13689 mips32_op
= OPC_SUB
;
13692 mips32_op
= OPC_SUBU
;
13695 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13696 mips32_op
= OPC_MUL
;
13698 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13702 mips32_op
= OPC_SLLV
;
13705 mips32_op
= OPC_SRLV
;
13708 mips32_op
= OPC_SRAV
;
13711 mips32_op
= OPC_ROTRV
;
13713 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13715 /* Logical operations */
13717 mips32_op
= OPC_AND
;
13720 mips32_op
= OPC_OR
;
13723 mips32_op
= OPC_NOR
;
13726 mips32_op
= OPC_XOR
;
13728 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13730 /* Set less than */
13732 mips32_op
= OPC_SLT
;
13735 mips32_op
= OPC_SLTU
;
13737 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13740 goto pool32a_invalid
;
13744 minor
= (ctx
->opcode
>> 6) & 0xf;
13746 /* Conditional moves */
13747 case MOVN
: /* MUL */
13748 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13750 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13753 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13756 case MOVZ
: /* MUH */
13757 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13759 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13762 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13766 check_insn(ctx
, ISA_MIPS32R6
);
13767 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13770 check_insn(ctx
, ISA_MIPS32R6
);
13771 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13773 case LWXS
: /* DIV */
13774 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13776 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13779 gen_ldxs(ctx
, rs
, rt
, rd
);
13783 check_insn(ctx
, ISA_MIPS32R6
);
13784 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13787 check_insn(ctx
, ISA_MIPS32R6
);
13788 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13791 check_insn(ctx
, ISA_MIPS32R6
);
13792 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13795 goto pool32a_invalid
;
13799 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13802 check_insn(ctx
, ISA_MIPS32R6
);
13803 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13804 extract32(ctx
->opcode
, 9, 2));
13807 check_insn(ctx
, ISA_MIPS32R6
);
13808 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13809 extract32(ctx
->opcode
, 9, 2));
13812 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13815 gen_pool32axf(env
, ctx
, rt
, rs
);
13818 generate_exception_end(ctx
, EXCP_BREAK
);
13821 check_insn(ctx
, ISA_MIPS32R6
);
13822 generate_exception_end(ctx
, EXCP_RI
);
13826 MIPS_INVAL("pool32a");
13827 generate_exception_end(ctx
, EXCP_RI
);
13832 minor
= (ctx
->opcode
>> 12) & 0xf;
13835 check_cp0_enabled(ctx
);
13836 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13837 gen_cache_operation(ctx
, rt
, rs
, imm
);
13842 /* COP2: Not implemented. */
13843 generate_exception_err(ctx
, EXCP_CpU
, 2);
13845 #ifdef TARGET_MIPS64
13848 check_insn(ctx
, ISA_MIPS3
);
13849 check_mips_64(ctx
);
13854 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13856 #ifdef TARGET_MIPS64
13859 check_insn(ctx
, ISA_MIPS3
);
13860 check_mips_64(ctx
);
13865 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13868 MIPS_INVAL("pool32b");
13869 generate_exception_end(ctx
, EXCP_RI
);
13874 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13875 minor
= ctx
->opcode
& 0x3f;
13876 check_cp1_enabled(ctx
);
13879 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13880 mips32_op
= OPC_ALNV_PS
;
13883 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13884 mips32_op
= OPC_MADD_S
;
13887 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13888 mips32_op
= OPC_MADD_D
;
13891 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13892 mips32_op
= OPC_MADD_PS
;
13895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13896 mips32_op
= OPC_MSUB_S
;
13899 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13900 mips32_op
= OPC_MSUB_D
;
13903 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13904 mips32_op
= OPC_MSUB_PS
;
13907 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13908 mips32_op
= OPC_NMADD_S
;
13911 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13912 mips32_op
= OPC_NMADD_D
;
13915 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13916 mips32_op
= OPC_NMADD_PS
;
13919 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13920 mips32_op
= OPC_NMSUB_S
;
13923 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13924 mips32_op
= OPC_NMSUB_D
;
13927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13928 mips32_op
= OPC_NMSUB_PS
;
13930 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13932 case CABS_COND_FMT
:
13933 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13934 cond
= (ctx
->opcode
>> 6) & 0xf;
13935 cc
= (ctx
->opcode
>> 13) & 0x7;
13936 fmt
= (ctx
->opcode
>> 10) & 0x3;
13939 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13942 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13945 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13948 goto pool32f_invalid
;
13952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13953 cond
= (ctx
->opcode
>> 6) & 0xf;
13954 cc
= (ctx
->opcode
>> 13) & 0x7;
13955 fmt
= (ctx
->opcode
>> 10) & 0x3;
13958 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13961 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13964 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13967 goto pool32f_invalid
;
13971 check_insn(ctx
, ISA_MIPS32R6
);
13972 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13975 check_insn(ctx
, ISA_MIPS32R6
);
13976 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13979 gen_pool32fxf(ctx
, rt
, rs
);
13983 switch ((ctx
->opcode
>> 6) & 0x7) {
13985 mips32_op
= OPC_PLL_PS
;
13988 mips32_op
= OPC_PLU_PS
;
13991 mips32_op
= OPC_PUL_PS
;
13994 mips32_op
= OPC_PUU_PS
;
13997 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13998 mips32_op
= OPC_CVT_PS_S
;
14000 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14003 goto pool32f_invalid
;
14007 check_insn(ctx
, ISA_MIPS32R6
);
14008 switch ((ctx
->opcode
>> 9) & 0x3) {
14010 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14013 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14016 goto pool32f_invalid
;
14021 switch ((ctx
->opcode
>> 6) & 0x7) {
14023 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14024 mips32_op
= OPC_LWXC1
;
14027 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14028 mips32_op
= OPC_SWXC1
;
14031 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14032 mips32_op
= OPC_LDXC1
;
14035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14036 mips32_op
= OPC_SDXC1
;
14039 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14040 mips32_op
= OPC_LUXC1
;
14043 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14044 mips32_op
= OPC_SUXC1
;
14046 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14049 goto pool32f_invalid
;
14053 check_insn(ctx
, ISA_MIPS32R6
);
14054 switch ((ctx
->opcode
>> 9) & 0x3) {
14056 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14059 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14062 goto pool32f_invalid
;
14067 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14068 fmt
= (ctx
->opcode
>> 9) & 0x3;
14069 switch ((ctx
->opcode
>> 6) & 0x7) {
14073 mips32_op
= OPC_RSQRT2_S
;
14076 mips32_op
= OPC_RSQRT2_D
;
14079 mips32_op
= OPC_RSQRT2_PS
;
14082 goto pool32f_invalid
;
14088 mips32_op
= OPC_RECIP2_S
;
14091 mips32_op
= OPC_RECIP2_D
;
14094 mips32_op
= OPC_RECIP2_PS
;
14097 goto pool32f_invalid
;
14101 mips32_op
= OPC_ADDR_PS
;
14104 mips32_op
= OPC_MULR_PS
;
14106 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14109 goto pool32f_invalid
;
14113 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14114 cc
= (ctx
->opcode
>> 13) & 0x7;
14115 fmt
= (ctx
->opcode
>> 9) & 0x3;
14116 switch ((ctx
->opcode
>> 6) & 0x7) {
14117 case MOVF_FMT
: /* RINT_FMT */
14118 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14122 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14125 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14128 goto pool32f_invalid
;
14134 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14137 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14141 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14144 goto pool32f_invalid
;
14148 case MOVT_FMT
: /* CLASS_FMT */
14149 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14153 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14156 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14159 goto pool32f_invalid
;
14165 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14168 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14172 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14175 goto pool32f_invalid
;
14180 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14183 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        check_ps(ctx);                                  \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
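    /*
     * Illustrative note (added commentary): FINSN_3ARG_SDPS(ADD), for
     * example, expands to a switch on the fmt field in bits 9..8 of the
     * opcode and picks OPC_ADD_S, OPC_ADD_D or OPC_ADD_PS, jumping to
     * pool32f_invalid for the reserved encoding.  The POOL32F handling
     * below uses it for the ADD, SUB, MUL, MOVN and MOVZ families.
     */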
14202 check_insn(ctx
, ISA_MIPS32R6
);
14203 switch ((ctx
->opcode
>> 9) & 0x3) {
14205 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14208 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14211 goto pool32f_invalid
;
14215 check_insn(ctx
, ISA_MIPS32R6
);
14216 switch ((ctx
->opcode
>> 9) & 0x3) {
14218 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14221 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14224 goto pool32f_invalid
;
14228 /* regular FP ops */
14229 switch ((ctx
->opcode
>> 6) & 0x3) {
14231 FINSN_3ARG_SDPS(ADD
);
14234 FINSN_3ARG_SDPS(SUB
);
14237 FINSN_3ARG_SDPS(MUL
);
14240 fmt
= (ctx
->opcode
>> 8) & 0x3;
14242 mips32_op
= OPC_DIV_D
;
14243 } else if (fmt
== 0) {
14244 mips32_op
= OPC_DIV_S
;
14246 goto pool32f_invalid
;
14250 goto pool32f_invalid
;
14255 switch ((ctx
->opcode
>> 6) & 0x7) {
14256 case MOVN_FMT
: /* SELNEZ_FMT */
14257 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14259 switch ((ctx
->opcode
>> 9) & 0x3) {
14261 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14264 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14267 goto pool32f_invalid
;
14271 FINSN_3ARG_SDPS(MOVN
);
14275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14276 FINSN_3ARG_SDPS(MOVN
);
14278 case MOVZ_FMT
: /* SELEQZ_FMT */
14279 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14281 switch ((ctx
->opcode
>> 9) & 0x3) {
14283 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14286 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14289 goto pool32f_invalid
;
14293 FINSN_3ARG_SDPS(MOVZ
);
14297 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14298 FINSN_3ARG_SDPS(MOVZ
);
14301 check_insn(ctx
, ISA_MIPS32R6
);
14302 switch ((ctx
->opcode
>> 9) & 0x3) {
14304 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14307 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14310 goto pool32f_invalid
;
14314 check_insn(ctx
, ISA_MIPS32R6
);
14315 switch ((ctx
->opcode
>> 9) & 0x3) {
14317 mips32_op
= OPC_MADDF_S
;
14320 mips32_op
= OPC_MADDF_D
;
14323 goto pool32f_invalid
;
14327 check_insn(ctx
, ISA_MIPS32R6
);
14328 switch ((ctx
->opcode
>> 9) & 0x3) {
14330 mips32_op
= OPC_MSUBF_S
;
14333 mips32_op
= OPC_MSUBF_D
;
14336 goto pool32f_invalid
;
14340 goto pool32f_invalid
;
14344 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14348 MIPS_INVAL("pool32f");
14349 generate_exception_end(ctx
, EXCP_RI
);
14353 generate_exception_err(ctx
, EXCP_CpU
, 1);
14357 minor
= (ctx
->opcode
>> 21) & 0x1f;
14360 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14361 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14364 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14365 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14366 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14369 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14370 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14371 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14374 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14375 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14378 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14379 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14380 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14383 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14384 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14385 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14388 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14389 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14392 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14393 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14397 case TLTI
: /* BC1EQZC */
14398 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14400 check_cp1_enabled(ctx
);
14401 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14404 mips32_op
= OPC_TLTI
;
14408 case TGEI
: /* BC1NEZC */
14409 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14411 check_cp1_enabled(ctx
);
14412 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14415 mips32_op
= OPC_TGEI
;
14420 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14421 mips32_op
= OPC_TLTIU
;
14424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14425 mips32_op
= OPC_TGEIU
;
14427 case TNEI
: /* SYNCI */
14428 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* Break the TB to be able to sync copied instructions
                   immediately */
                ctx->bstate = BS_STOP;
14435 mips32_op
= OPC_TNEI
;
14440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14441 mips32_op
= OPC_TEQI
;
14443 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14448 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14449 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14450 4, rs
, 0, imm
<< 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
14456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14457 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14460 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
14467 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14468 /* COP2: Not implemented. */
14469 generate_exception_err(ctx
, EXCP_CpU
, 2);
14472 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14473 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14476 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14477 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14480 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14481 mips32_op
= OPC_BC1FANY4
;
14484 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14485 mips32_op
= OPC_BC1TANY4
;
14488 check_insn(ctx
, ASE_MIPS3D
);
14491 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14492 check_cp1_enabled(ctx
);
14493 gen_compute_branch1(ctx
, mips32_op
,
14494 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14496 generate_exception_err(ctx
, EXCP_CpU
, 1);
14501 /* MIPS DSP: not implemented */
14504 MIPS_INVAL("pool32i");
14505 generate_exception_end(ctx
, EXCP_RI
);
14510 minor
= (ctx
->opcode
>> 12) & 0xf;
14511 offset
= sextract32(ctx
->opcode
, 0,
14512 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14515 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14516 mips32_op
= OPC_LWL
;
14519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14520 mips32_op
= OPC_SWL
;
14523 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14524 mips32_op
= OPC_LWR
;
14527 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14528 mips32_op
= OPC_SWR
;
14530 #if defined(TARGET_MIPS64)
14532 check_insn(ctx
, ISA_MIPS3
);
14533 check_mips_64(ctx
);
14534 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14535 mips32_op
= OPC_LDL
;
14538 check_insn(ctx
, ISA_MIPS3
);
14539 check_mips_64(ctx
);
14540 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14541 mips32_op
= OPC_SDL
;
14544 check_insn(ctx
, ISA_MIPS3
);
14545 check_mips_64(ctx
);
14546 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14547 mips32_op
= OPC_LDR
;
14550 check_insn(ctx
, ISA_MIPS3
);
14551 check_mips_64(ctx
);
14552 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14553 mips32_op
= OPC_SDR
;
14556 check_insn(ctx
, ISA_MIPS3
);
14557 check_mips_64(ctx
);
14558 mips32_op
= OPC_LWU
;
14561 check_insn(ctx
, ISA_MIPS3
);
14562 check_mips_64(ctx
);
14563 mips32_op
= OPC_LLD
;
14567 mips32_op
= OPC_LL
;
14570 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14573 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14576 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14578 #if defined(TARGET_MIPS64)
14580 check_insn(ctx
, ISA_MIPS3
);
14581 check_mips_64(ctx
);
14582 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14586 /* Treat as no-op */
14587 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14588 /* hint codes 24-31 are reserved and signal RI */
14589 generate_exception(ctx
, EXCP_RI
);
14593 MIPS_INVAL("pool32c");
14594 generate_exception_end(ctx
, EXCP_RI
);
14598 case ADDI32
: /* AUI, LUI */
14599 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14601 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14604 mips32_op
= OPC_ADDI
;
14609 mips32_op
= OPC_ADDIU
;
14611 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14614 /* Logical operations */
14616 mips32_op
= OPC_ORI
;
14619 mips32_op
= OPC_XORI
;
14622 mips32_op
= OPC_ANDI
;
14624 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14627 /* Set less than immediate */
14629 mips32_op
= OPC_SLTI
;
14632 mips32_op
= OPC_SLTIU
;
14634 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14637 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14638 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14639 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14640 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14642 case JALS32
: /* BOVC, BEQC, BEQZALC */
14643 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14646 mips32_op
= OPC_BOVC
;
14647 } else if (rs
< rt
&& rs
== 0) {
14649 mips32_op
= OPC_BEQZALC
;
14652 mips32_op
= OPC_BEQC
;
14654 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14657 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14658 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14659 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14662 case BEQ32
: /* BC */
14663 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14665 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14666 sextract32(ctx
->opcode
<< 1, 0, 27));
14669 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14672 case BNE32
: /* BALC */
14673 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14675 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14676 sextract32(ctx
->opcode
<< 1, 0, 27));
14679 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14682 case J32
: /* BGTZC, BLTZC, BLTC */
14683 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14684 if (rs
== 0 && rt
!= 0) {
14686 mips32_op
= OPC_BGTZC
;
14687 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14689 mips32_op
= OPC_BLTZC
;
14692 mips32_op
= OPC_BLTC
;
14694 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14697 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14698 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14701 case JAL32
: /* BLEZC, BGEZC, BGEC */
14702 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14703 if (rs
== 0 && rt
!= 0) {
14705 mips32_op
= OPC_BLEZC
;
14706 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14708 mips32_op
= OPC_BGEZC
;
14711 mips32_op
= OPC_BGEC
;
14713 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14716 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14717 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14718 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14721 /* Floating point (COP1) */
14723 mips32_op
= OPC_LWC1
;
14726 mips32_op
= OPC_LDC1
;
14729 mips32_op
= OPC_SWC1
;
14732 mips32_op
= OPC_SDC1
;
14734 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14736 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14737 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14738 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14739 switch ((ctx
->opcode
>> 16) & 0x1f) {
14740 case ADDIUPC_00
... ADDIUPC_07
:
14741 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14744 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14747 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14749 case LWPC_08
... LWPC_0F
:
14750 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14753 generate_exception(ctx
, EXCP_RI
);
14758 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14759 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14761 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14764 case BNVC
: /* BNEC, BNEZALC */
14765 check_insn(ctx
, ISA_MIPS32R6
);
14768 mips32_op
= OPC_BNVC
;
14769 } else if (rs
< rt
&& rs
== 0) {
14771 mips32_op
= OPC_BNEZALC
;
14774 mips32_op
= OPC_BNEC
;
14776 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14778 case R6_BNEZC
: /* JIALC */
14779 check_insn(ctx
, ISA_MIPS32R6
);
14782 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14783 sextract32(ctx
->opcode
<< 1, 0, 22));
14786 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14789 case R6_BEQZC
: /* JIC */
14790 check_insn(ctx
, ISA_MIPS32R6
);
14793 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14794 sextract32(ctx
->opcode
<< 1, 0, 22));
14797 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14800 case BLEZALC
: /* BGEZALC, BGEUC */
14801 check_insn(ctx
, ISA_MIPS32R6
);
14802 if (rs
== 0 && rt
!= 0) {
14804 mips32_op
= OPC_BLEZALC
;
14805 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14807 mips32_op
= OPC_BGEZALC
;
14810 mips32_op
= OPC_BGEUC
;
14812 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14814 case BGTZALC
: /* BLTZALC, BLTUC */
14815 check_insn(ctx
, ISA_MIPS32R6
);
14816 if (rs
== 0 && rt
!= 0) {
14818 mips32_op
= OPC_BGTZALC
;
14819 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14821 mips32_op
= OPC_BLTZALC
;
14824 mips32_op
= OPC_BLTUC
;
14826 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14828 /* Loads and stores */
14830 mips32_op
= OPC_LB
;
14833 mips32_op
= OPC_LBU
;
14836 mips32_op
= OPC_LH
;
14839 mips32_op
= OPC_LHU
;
14842 mips32_op
= OPC_LW
;
14844 #ifdef TARGET_MIPS64
14846 check_insn(ctx
, ISA_MIPS3
);
14847 check_mips_64(ctx
);
14848 mips32_op
= OPC_LD
;
14851 check_insn(ctx
, ISA_MIPS3
);
14852 check_mips_64(ctx
);
14853 mips32_op
= OPC_SD
;
14857 mips32_op
= OPC_SB
;
14860 mips32_op
= OPC_SH
;
14863 mips32_op
= OPC_SW
;
14866 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14869 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14872 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        case 0:
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        case 4:
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        case 5:
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        case 6:
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        case 7:
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        case 1:
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        case 2:
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        case 3:
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
                return 2;
            }
            break;
        }
    }
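    /*
     * Illustrative note (added commentary): MIPS_HFLAG_BDS_STRICT is set by
     * the branch that owns this delay slot (the branch handling in the
     * 32-bit decoder above ORs it into ctx->hflags).  The check above then
     * raises a Reserved Instruction exception when a 32-bit encoded
     * instruction sits in a slot required to be 16-bit (MIPS_HFLAG_BDS16),
     * or a 16-bit one in a slot required to be 32-bit (MIPS_HFLAG_BDS32).
     */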
    switch (op) {
    case POOL16A:
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
            int rs2 = mmreg(uMIPS_RS2(ctx->opcode));
            uint32_t opc;

            switch (ctx->opcode & 0x1) {
            case 0:
                opc = OPC_ADDU;
                break;
            case 1:
                opc = OPC_SUBU;
                break;
            }
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* In the Release 6 the register number location in
                 * the instruction encoding has changed.
                 */
                gen_arith(ctx, opc, rs1, rd, rs2);
            } else {
                gen_arith(ctx, opc, rd, rs1, rs2);
            }
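            /*
             * Illustrative note (added commentary): the only difference
             * between the two paths is the operand order handed to
             * gen_arith() -- the Release 6 encoding swaps the roles of the
             * rd and rs1 fields, hence (rs1, rd, rs2) instead of
             * (rd, rs1, rs2).
             */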
14949 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14950 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14951 int amount
= (ctx
->opcode
>> 1) & 0x7;
14953 amount
= amount
== 0 ? 8 : amount
;
14955 switch (ctx
->opcode
& 0x1) {
14964 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14968 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14969 gen_pool16c_r6_insn(ctx
);
14971 gen_pool16c_insn(ctx
);
14976 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14977 int rb
= 28; /* GP */
14978 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14980 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14984 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14985 if (ctx
->opcode
& 1) {
14986 generate_exception_end(ctx
, EXCP_RI
);
14989 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14990 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14991 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14992 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14997 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14998 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14999 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15000 offset
= (offset
== 0xf ? -1 : offset
);
15002 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15007 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15008 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15009 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15011 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15016 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15017 int rb
= 29; /* SP */
15018 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15020 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15025 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15026 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15027 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15029 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15034 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15035 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15036 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15038 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15043 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15044 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15045 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15047 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15052 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15053 int rb
= 29; /* SP */
15054 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15056 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15061 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15062 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15063 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15065 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15070 int rd
= uMIPS_RD5(ctx
->opcode
);
15071 int rs
= uMIPS_RS5(ctx
->opcode
);
15073 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15080 switch (ctx
->opcode
& 0x1) {
15090 switch (ctx
->opcode
& 0x1) {
15095 gen_addiur1sp(ctx
);
15099 case B16
: /* BC16 */
15100 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15101 sextract32(ctx
->opcode
, 0, 10) << 1,
15102 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15104 case BNEZ16
: /* BNEZC16 */
15105 case BEQZ16
: /* BEQZC16 */
15106 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15107 mmreg(uMIPS_RD(ctx
->opcode
)),
15108 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15109 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15114 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15115 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15117 imm
= (imm
== 0x7f ? -1 : imm
);
15118 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15124 generate_exception_end(ctx
, EXCP_RI
);
15127 decode_micromips32_opc(env
, ctx
);
15134 /* SmartMIPS extension to MIPS32 */
15136 #if defined(TARGET_MIPS64)
15138 /* MDMX extension to MIPS64 */
15142 /* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    check_dsp(ctx);
    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
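/*
 * Illustrative note (added commentary) on gen_mipsdsp_ld() above: the
 * "offset" argument is a register number, not an immediate, so the
 * effective address is GPR[base] + GPR[offset].  Since register 0 always
 * reads as zero, the base == 0 and offset == 0 special cases simply load
 * the other register directly instead of emitting an add.
 */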
15182 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15183 int ret
, int v1
, int v2
)
15189 /* Treat as NOP. */
15193 v1_t
= tcg_temp_new();
15194 v2_t
= tcg_temp_new();
15196 gen_load_gpr(v1_t
, v1
);
15197 gen_load_gpr(v2_t
, v2
);
15200 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15201 case OPC_MULT_G_2E
:
15205 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15207 case OPC_ADDUH_R_QB
:
15208 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15211 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15213 case OPC_ADDQH_R_PH
:
15214 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15217 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15219 case OPC_ADDQH_R_W
:
15220 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15223 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15225 case OPC_SUBUH_R_QB
:
15226 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15229 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15231 case OPC_SUBQH_R_PH
:
15232 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15235 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15237 case OPC_SUBQH_R_W
:
15238 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15242 case OPC_ABSQ_S_PH_DSP
:
15244 case OPC_ABSQ_S_QB
:
15246 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15248 case OPC_ABSQ_S_PH
:
15250 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15254 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
        case OPC_PRECEQ_W_PHL:
            check_dsp(ctx);
            tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFF0000);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            break;
        case OPC_PRECEQ_W_PHR:
            check_dsp(ctx);
            tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0x0000FFFF);
            tcg_gen_shli_tl(cpu_gpr[ret], cpu_gpr[ret], 16);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            break;
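            /*
             * Illustrative note (added commentary): PRECEQ.W.PHL keeps the
             * left (upper) halfword of the source (v2_t) in the top 16 bits
             * of the result, while PRECEQ.W.PHR shifts the right (lower)
             * halfword up by 16, so either way the selected Q15 halfword
             * becomes a Q31 word with zeroes in the low bits.
             */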
15267 case OPC_PRECEQU_PH_QBL
:
15269 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15271 case OPC_PRECEQU_PH_QBR
:
15273 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15275 case OPC_PRECEQU_PH_QBLA
:
15277 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15279 case OPC_PRECEQU_PH_QBRA
:
15281 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15283 case OPC_PRECEU_PH_QBL
:
15285 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15287 case OPC_PRECEU_PH_QBR
:
15289 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15291 case OPC_PRECEU_PH_QBLA
:
15293 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15295 case OPC_PRECEU_PH_QBRA
:
15297 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15301 case OPC_ADDU_QB_DSP
:
15305 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15307 case OPC_ADDQ_S_PH
:
15309 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15313 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15317 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15319 case OPC_ADDU_S_QB
:
15321 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15325 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15327 case OPC_ADDU_S_PH
:
15329 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15333 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15335 case OPC_SUBQ_S_PH
:
15337 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15341 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15345 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15347 case OPC_SUBU_S_QB
:
15349 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15353 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15355 case OPC_SUBU_S_PH
:
15357 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15361 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15365 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15369 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15371 case OPC_RADDU_W_QB
:
15373 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15377 case OPC_CMPU_EQ_QB_DSP
:
15379 case OPC_PRECR_QB_PH
:
15381 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15383 case OPC_PRECRQ_QB_PH
:
15385 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15387 case OPC_PRECR_SRA_PH_W
:
15390 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15391 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15393 tcg_temp_free_i32(sa_t
);
15396 case OPC_PRECR_SRA_R_PH_W
:
15399 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15400 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15402 tcg_temp_free_i32(sa_t
);
15405 case OPC_PRECRQ_PH_W
:
15407 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15409 case OPC_PRECRQ_RS_PH_W
:
15411 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15413 case OPC_PRECRQU_S_QB_PH
:
15415 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15419 #ifdef TARGET_MIPS64
15420 case OPC_ABSQ_S_QH_DSP
:
15422 case OPC_PRECEQ_L_PWL
:
15424 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15426 case OPC_PRECEQ_L_PWR
:
15428 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15430 case OPC_PRECEQ_PW_QHL
:
15432 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15434 case OPC_PRECEQ_PW_QHR
:
15436 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15438 case OPC_PRECEQ_PW_QHLA
:
15440 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15442 case OPC_PRECEQ_PW_QHRA
:
15444 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15446 case OPC_PRECEQU_QH_OBL
:
15448 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15450 case OPC_PRECEQU_QH_OBR
:
15452 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15454 case OPC_PRECEQU_QH_OBLA
:
15456 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15458 case OPC_PRECEQU_QH_OBRA
:
15460 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15462 case OPC_PRECEU_QH_OBL
:
15464 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15466 case OPC_PRECEU_QH_OBR
:
15468 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15470 case OPC_PRECEU_QH_OBLA
:
15472 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15474 case OPC_PRECEU_QH_OBRA
:
15476 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15478 case OPC_ABSQ_S_OB
:
15480 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15482 case OPC_ABSQ_S_PW
:
15484 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15486 case OPC_ABSQ_S_QH
:
15488 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15492 case OPC_ADDU_OB_DSP
:
15494 case OPC_RADDU_L_OB
:
15496 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15500 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15502 case OPC_SUBQ_S_PW
:
15504 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15508 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15510 case OPC_SUBQ_S_QH
:
15512 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15516 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15518 case OPC_SUBU_S_OB
:
15520 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15524 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15526 case OPC_SUBU_S_QH
:
15528 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15532 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15534 case OPC_SUBUH_R_OB
:
15536 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15540 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15542 case OPC_ADDQ_S_PW
:
15544 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15548 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15550 case OPC_ADDQ_S_QH
:
15552 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15556 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15558 case OPC_ADDU_S_OB
:
15560 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15564 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15566 case OPC_ADDU_S_QH
:
15568 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15572 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15574 case OPC_ADDUH_R_OB
:
15576 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15580 case OPC_CMPU_EQ_OB_DSP
:
15582 case OPC_PRECR_OB_QH
:
15584 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15586 case OPC_PRECR_SRA_QH_PW
:
15589 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15590 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15591 tcg_temp_free_i32(ret_t
);
15594 case OPC_PRECR_SRA_R_QH_PW
:
15597 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15598 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15599 tcg_temp_free_i32(sa_v
);
15602 case OPC_PRECRQ_OB_QH
:
15604 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15606 case OPC_PRECRQ_PW_L
:
15608 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15610 case OPC_PRECRQ_QH_PW
:
15612 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15614 case OPC_PRECRQ_RS_QH_PW
:
15616 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15618 case OPC_PRECRQU_S_OB_QH
:
15620 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15627 tcg_temp_free(v1_t
);
15628 tcg_temp_free(v2_t
);
15631 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15632 int ret
, int v1
, int v2
)
15640 /* Treat as NOP. */
15644 t0
= tcg_temp_new();
15645 v1_t
= tcg_temp_new();
15646 v2_t
= tcg_temp_new();
15648 tcg_gen_movi_tl(t0
, v1
);
15649 gen_load_gpr(v1_t
, v1
);
15650 gen_load_gpr(v2_t
, v2
);
15653 case OPC_SHLL_QB_DSP
:
15655 op2
= MASK_SHLL_QB(ctx
->opcode
);
15659 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15663 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15667 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15671 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15673 case OPC_SHLL_S_PH
:
15675 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15677 case OPC_SHLLV_S_PH
:
15679 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15683 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15685 case OPC_SHLLV_S_W
:
15687 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15691 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15695 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15699 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15703 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15707 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15709 case OPC_SHRA_R_QB
:
15711 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15715 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15717 case OPC_SHRAV_R_QB
:
15719 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15723 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15725 case OPC_SHRA_R_PH
:
15727 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15731 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15733 case OPC_SHRAV_R_PH
:
15735 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15739 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15741 case OPC_SHRAV_R_W
:
15743 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15745 default: /* Invalid */
15746 MIPS_INVAL("MASK SHLL.QB");
15747 generate_exception_end(ctx
, EXCP_RI
);
15752 #ifdef TARGET_MIPS64
15753 case OPC_SHLL_OB_DSP
:
15754 op2
= MASK_SHLL_OB(ctx
->opcode
);
15758 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15762 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15764 case OPC_SHLL_S_PW
:
15766 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15768 case OPC_SHLLV_S_PW
:
15770 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15774 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15778 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15782 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15786 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15788 case OPC_SHLL_S_QH
:
15790 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15792 case OPC_SHLLV_S_QH
:
15794 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15798 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15802 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15804 case OPC_SHRA_R_OB
:
15806 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15808 case OPC_SHRAV_R_OB
:
15810 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15814 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15818 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15820 case OPC_SHRA_R_PW
:
15822 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15824 case OPC_SHRAV_R_PW
:
15826 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15830 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15834 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15836 case OPC_SHRA_R_QH
:
15838 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15840 case OPC_SHRAV_R_QH
:
15842 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15846 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15850 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15854 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15858 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15860 default: /* Invalid */
15861 MIPS_INVAL("MASK SHLL.OB");
15862 generate_exception_end(ctx
, EXCP_RI
);
15870 tcg_temp_free(v1_t
);
15871 tcg_temp_free(v2_t
);
15874 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15875 int ret
, int v1
, int v2
, int check_ret
)
15881 if ((ret
== 0) && (check_ret
== 1)) {
15882 /* Treat as NOP. */
15886 t0
= tcg_temp_new_i32();
15887 v1_t
= tcg_temp_new();
15888 v2_t
= tcg_temp_new();
15890 tcg_gen_movi_i32(t0
, ret
);
15891 gen_load_gpr(v1_t
, v1
);
15892 gen_load_gpr(v2_t
, v2
);
15895 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15896 * the same mask and op1. */
15897 case OPC_MULT_G_2E
:
15901 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15904 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15907 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15909 case OPC_MULQ_RS_W
:
15910 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15914 case OPC_DPA_W_PH_DSP
:
15916 case OPC_DPAU_H_QBL
:
15918 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15920 case OPC_DPAU_H_QBR
:
15922 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15924 case OPC_DPSU_H_QBL
:
15926 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15928 case OPC_DPSU_H_QBR
:
15930 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15934 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15936 case OPC_DPAX_W_PH
:
15938 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15940 case OPC_DPAQ_S_W_PH
:
15942 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15944 case OPC_DPAQX_S_W_PH
:
15946 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15948 case OPC_DPAQX_SA_W_PH
:
15950 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15954 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15956 case OPC_DPSX_W_PH
:
15958 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15960 case OPC_DPSQ_S_W_PH
:
15962 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15964 case OPC_DPSQX_S_W_PH
:
15966 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15968 case OPC_DPSQX_SA_W_PH
:
15970 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15972 case OPC_MULSAQ_S_W_PH
:
15974 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15976 case OPC_DPAQ_SA_L_W
:
15978 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15980 case OPC_DPSQ_SA_L_W
:
15982 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15984 case OPC_MAQ_S_W_PHL
:
15986 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15988 case OPC_MAQ_S_W_PHR
:
15990 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15992 case OPC_MAQ_SA_W_PHL
:
15994 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15996 case OPC_MAQ_SA_W_PHR
:
15998 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16000 case OPC_MULSA_W_PH
:
16002 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16006 #ifdef TARGET_MIPS64
16007 case OPC_DPAQ_W_QH_DSP
:
16009 int ac
= ret
& 0x03;
16010 tcg_gen_movi_i32(t0
, ac
);
16015 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16019 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16023 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16027 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16031 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16033 case OPC_DPAQ_S_W_QH
:
16035 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16037 case OPC_DPAQ_SA_L_PW
:
16039 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16041 case OPC_DPAU_H_OBL
:
16043 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16045 case OPC_DPAU_H_OBR
:
16047 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16051 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16053 case OPC_DPSQ_S_W_QH
:
16055 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16057 case OPC_DPSQ_SA_L_PW
:
16059 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16061 case OPC_DPSU_H_OBL
:
16063 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16065 case OPC_DPSU_H_OBR
:
16067 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16069 case OPC_MAQ_S_L_PWL
:
16071 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16073 case OPC_MAQ_S_L_PWR
:
16075 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16077 case OPC_MAQ_S_W_QHLL
:
16079 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16081 case OPC_MAQ_SA_W_QHLL
:
16083 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16085 case OPC_MAQ_S_W_QHLR
:
16087 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16089 case OPC_MAQ_SA_W_QHLR
:
16091 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16093 case OPC_MAQ_S_W_QHRL
:
16095 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16097 case OPC_MAQ_SA_W_QHRL
:
16099 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16101 case OPC_MAQ_S_W_QHRR
:
16103 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16105 case OPC_MAQ_SA_W_QHRR
:
16107 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16109 case OPC_MULSAQ_S_L_PW
:
16111 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16113 case OPC_MULSAQ_S_W_QH
:
16115 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16121 case OPC_ADDU_QB_DSP
:
16123 case OPC_MULEU_S_PH_QBL
:
16125 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16127 case OPC_MULEU_S_PH_QBR
:
16129 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16131 case OPC_MULQ_RS_PH
:
16133 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16135 case OPC_MULEQ_S_W_PHL
:
16137 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16139 case OPC_MULEQ_S_W_PHR
:
16141 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16143 case OPC_MULQ_S_PH
:
16145 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16149 #ifdef TARGET_MIPS64
16150 case OPC_ADDU_OB_DSP
:
16152 case OPC_MULEQ_S_PW_QHL
:
16154 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16156 case OPC_MULEQ_S_PW_QHR
:
16158 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16160 case OPC_MULEU_S_QH_OBL
:
16162 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16164 case OPC_MULEU_S_QH_OBR
:
16166 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16168 case OPC_MULQ_RS_QH
:
16170 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16177 tcg_temp_free_i32(t0
);
16178 tcg_temp_free(v1_t
);
16179 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
    int16_t imm;
    TCGv t0;
    TCGv val_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);

    switch (op1) {
    case OPC_ABSQ_S_PH_DSP:
        switch (op2) {
        case OPC_BITREV:
            check_dsp(ctx);
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            break;
        case OPC_REPL_QB:
            check_dsp(ctx);
            {
                target_long result;
                imm = (ctx->opcode >> 16) & 0xFF;
                result = (uint32_t)imm << 24 |
                         (uint32_t)imm << 16 |
                         (uint32_t)imm << 8  |
                         (uint32_t)imm;
                result = (int32_t)result;
                tcg_gen_movi_tl(cpu_gpr[ret], result);
            }
            break;
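            /*
             * Illustrative example (added commentary): an 8-bit immediate of
             * 0x5A gives 0x5A << 24 | 0x5A << 16 | 0x5A << 8 | 0x5A =
             * 0x5A5A5A5A, i.e. the immediate replicated into all four byte
             * lanes of the destination register.
             */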
        case OPC_REPLV_QB:
            check_dsp(ctx);
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
            break;
        case OPC_REPL_PH:
            check_dsp(ctx);
            {
                imm = (ctx->opcode >> 16) & 0x03FF;
                imm = (int16_t)(imm << 6) >> 6;
                tcg_gen_movi_tl(cpu_gpr[ret], \
                                (target_long)((int32_t)imm << 16 | \
                                (uint16_t)imm));
            }
            break;
<< 16 | \
16239 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16240 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16241 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16242 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16246 #ifdef TARGET_MIPS64
16247 case OPC_ABSQ_S_QH_DSP
:
16254 imm
= (ctx
->opcode
>> 16) & 0xFF;
16255 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16256 temp
= (temp
<< 16) | temp
;
16257 temp
= (temp
<< 32) | temp
;
16258 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16266 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16267 imm
= (int16_t)(imm
<< 6) >> 6;
16268 temp
= ((target_long
)imm
<< 32) \
16269 | ((target_long
)imm
& 0xFFFFFFFF);
16270 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16278 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16279 imm
= (int16_t)(imm
<< 6) >> 6;
16281 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16282 ((uint64_t)(uint16_t)imm
<< 32) |
16283 ((uint64_t)(uint16_t)imm
<< 16) |
16284 (uint64_t)(uint16_t)imm
;
16285 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16290 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16291 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16292 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16293 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16294 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16295 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16296 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16300 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16301 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16302 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16306 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16307 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16308 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16309 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16310 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16317 tcg_temp_free(val_t
);
16320 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16321 uint32_t op1
, uint32_t op2
,
16322 int ret
, int v1
, int v2
, int check_ret
)
16328 if ((ret
== 0) && (check_ret
== 1)) {
16329 /* Treat as NOP. */
16333 t1
= tcg_temp_new();
16334 v1_t
= tcg_temp_new();
16335 v2_t
= tcg_temp_new();
16337 gen_load_gpr(v1_t
, v1
);
16338 gen_load_gpr(v2_t
, v2
);
16341 case OPC_CMPU_EQ_QB_DSP
:
16343 case OPC_CMPU_EQ_QB
:
16345 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16347 case OPC_CMPU_LT_QB
:
16349 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16351 case OPC_CMPU_LE_QB
:
16353 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16355 case OPC_CMPGU_EQ_QB
:
16357 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16359 case OPC_CMPGU_LT_QB
:
16361 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16363 case OPC_CMPGU_LE_QB
:
16365 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
        case OPC_CMPGDU_EQ_QB:
            check_dspr2(ctx);
            gen_helper_cmpgu_eq_qb(t1, v1_t, v2_t);
            tcg_gen_mov_tl(cpu_gpr[ret], t1);
            tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
            tcg_gen_shli_tl(t1, t1, 24);
            tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
            break;
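            /*
             * Illustrative note (added commentary): the CMPGDU form writes
             * its per-byte comparison result twice -- to the destination GPR
             * via the move above, and to the DSPControl condition-code bits
             * 27..24, which is why cpu_dspctrl is masked with 0xF0FFFFFF and
             * OR-ed with the result shifted left by 24.
             */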
16375 case OPC_CMPGDU_LT_QB
:
16377 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16378 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16379 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16380 tcg_gen_shli_tl(t1
, t1
, 24);
16381 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16383 case OPC_CMPGDU_LE_QB
:
16385 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16386 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16387 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16388 tcg_gen_shli_tl(t1
, t1
, 24);
16389 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16391 case OPC_CMP_EQ_PH
:
16393 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16395 case OPC_CMP_LT_PH
:
16397 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16399 case OPC_CMP_LE_PH
:
16401 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16405 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16409 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16411 case OPC_PACKRL_PH
:
16413 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16417 #ifdef TARGET_MIPS64
16418 case OPC_CMPU_EQ_OB_DSP
:
16420 case OPC_CMP_EQ_PW
:
16422 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16424 case OPC_CMP_LT_PW
:
16426 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16428 case OPC_CMP_LE_PW
:
16430 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16432 case OPC_CMP_EQ_QH
:
16434 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16436 case OPC_CMP_LT_QH
:
16438 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16440 case OPC_CMP_LE_QH
:
16442 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16444 case OPC_CMPGDU_EQ_OB
:
16446 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16448 case OPC_CMPGDU_LT_OB
:
16450 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16452 case OPC_CMPGDU_LE_OB
:
16454 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16456 case OPC_CMPGU_EQ_OB
:
16458 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16460 case OPC_CMPGU_LT_OB
:
16462 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16464 case OPC_CMPGU_LE_OB
:
16466 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16468 case OPC_CMPU_EQ_OB
:
16470 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16472 case OPC_CMPU_LT_OB
:
16474 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16476 case OPC_CMPU_LE_OB
:
16478 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16480 case OPC_PACKRL_PW
:
16482 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16486 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16490 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16494 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16502 tcg_temp_free(v1_t
);
16503 tcg_temp_free(v2_t
);
16506 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16507 uint32_t op1
, int rt
, int rs
, int sa
)
16514 /* Treat as NOP. */
16518 t0
= tcg_temp_new();
16519 gen_load_gpr(t0
, rs
);
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
        case OPC_APPEND:
            if (sa != 0) {
                tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            }
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
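            /*
             * Illustrative example (added commentary): the deposit above
             * computes rt = (rt << sa) | (rs & ((1 << sa) - 1)); with
             * sa = 8, rt = 0x11223344 and rs = 0xAABBCCDD the result is
             * 0x223344DD, sign-extended to the target register width by the
             * ext32s that follows.
             */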
16532 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16533 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16534 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16535 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16537 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16541 if (sa
!= 0 && sa
!= 2) {
16542 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16543 tcg_gen_ext32u_tl(t0
, t0
);
16544 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16545 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16547 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16549 default: /* Invalid */
16550 MIPS_INVAL("MASK APPEND");
16551 generate_exception_end(ctx
, EXCP_RI
);
16555 #ifdef TARGET_MIPS64
16556 case OPC_DAPPEND_DSP
:
16557 switch (MASK_DAPPEND(ctx
->opcode
)) {
16560 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16564 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16565 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16566 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16570 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16571 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16572 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16577 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16578 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16579 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16580 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16583 default: /* Invalid */
16584 MIPS_INVAL("MASK DAPPEND");
16585 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
{
    TCGv t0;
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;
    int16_t imm;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    case OPC_EXTR_W_DSP:
        switch (op2) {
        case OPC_EXTR_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTR_R_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTR_RS_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTR_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTRV_S_H:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_EXTRV_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_EXTRV_R_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_EXTRV_RS_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_EXTP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_EXTPDP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_extpdp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_EXTPDPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_extpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_SHILO:
            imm = (ctx->opcode >> 20) & 0x3F;
            tcg_gen_movi_tl(t0, ret);
            tcg_gen_movi_tl(t1, imm);
            gen_helper_shilo(t0, t1, cpu_env);
            break;
        case OPC_SHILOV:
            tcg_gen_movi_tl(t0, ret);
            gen_helper_shilo(t0, v1_t, cpu_env);
            break;
        case OPC_MTHLIP:
            tcg_gen_movi_tl(t0, ret);
            gen_helper_mthlip(t0, v1_t, cpu_env);
            break;
        case OPC_WRDSP:
            imm = (ctx->opcode >> 11) & 0x3FF;
            tcg_gen_movi_tl(t0, imm);
            gen_helper_wrdsp(v1_t, t0, cpu_env);
            break;
        case OPC_RDDSP:
            imm = (ctx->opcode >> 16) & 0x03FF;
            tcg_gen_movi_tl(t0, imm);
            gen_helper_rddsp(cpu_gpr[ret], t0, cpu_env);
            break;
        }
        break;
#ifdef TARGET_MIPS64
    case OPC_DEXTR_W_DSP:
        switch (op2) {
        case OPC_DMTHLIP:
            tcg_gen_movi_tl(t0, ret);
            gen_helper_dmthlip(v1_t, t0, cpu_env);
            break;
        case OPC_DSHILO:
            {
                int shift = (ctx->opcode >> 19) & 0x7F;
                int ac = (ctx->opcode >> 11) & 0x03;
                tcg_gen_movi_tl(t0, shift);
                tcg_gen_movi_tl(t1, ac);
                gen_helper_dshilo(t0, t1, cpu_env);
                break;
            }
        case OPC_DSHILOV:
            {
                int ac = (ctx->opcode >> 11) & 0x03;
                tcg_gen_movi_tl(t0, ac);
                gen_helper_dshilo(v1_t, t0, cpu_env);
                break;
            }
        case OPC_DEXTP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTPDP:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTPDPV:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTR_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_R_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_RS_L:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_R_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_RS_W:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTR_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTRV_S_H:
            tcg_gen_movi_tl(t0, v2);
            tcg_gen_movi_tl(t1, v1);
            gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
            break;
        case OPC_DEXTRV_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_R_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_RS_L:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_R_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        case OPC_DEXTRV_RS_W:
            tcg_gen_movi_tl(t0, v2);
            gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
            break;
        }
        break;
#endif
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(v1_t);
    tcg_temp_free(v2_t);
}

/* End MIPSDSP functions. */

static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_MUL:
        case R6_OPC_MUH:
        case R6_OPC_MULU:
        case R6_OPC_MUHU:
        case R6_OPC_DIV:
        case R6_OPC_MOD:
        case R6_OPC_DIVU:
        case R6_OPC_MODU:
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SELEQZ:
    case OPC_SELNEZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case R6_OPC_CLO:
    case R6_OPC_CLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case R6_OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                generate_exception_end(ctx, EXCP_DBp);
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DLSA:
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case R6_OPC_DCLO:
    case R6_OPC_DCLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_DMUL:
        case R6_OPC_DMUH:
        case R6_OPC_DMULU:
        case R6_OPC_DMUHU:
        case R6_OPC_DDIV:
        case R6_OPC_DMOD:
        case R6_OPC_DDIVU:
        case R6_OPC_DMODU:
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:          /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, rs & 3, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        break;
    case OPC_MOVCI:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_MULT:
    case OPC_MULTU:
        if (sa) {
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        } else {
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        }
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_SPIM:
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
#else
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
#endif
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

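/*
 * decode_opc_special() below handles the SPECIAL-major encodings shared by
 * all ISA revisions itself; the remaining encodings are forwarded to
 * decode_opc_special_r6() on R6 (or MSA-capable) cores and to
 * decode_opc_special_legacy() above otherwise (see the default case at the
 * end of that function).
 */
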
17001 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17003 int rs
, rt
, rd
, sa
;
17006 rs
= (ctx
->opcode
>> 21) & 0x1f;
17007 rt
= (ctx
->opcode
>> 16) & 0x1f;
17008 rd
= (ctx
->opcode
>> 11) & 0x1f;
17009 sa
= (ctx
->opcode
>> 6) & 0x1f;
17011 op1
= MASK_SPECIAL(ctx
->opcode
);
17013 case OPC_SLL
: /* Shift with immediate */
17014 if (sa
== 5 && rd
== 0 &&
17015 rs
== 0 && rt
== 0) { /* PAUSE */
17016 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17017 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17018 generate_exception_end(ctx
, EXCP_RI
);
17024 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17027 switch ((ctx
->opcode
>> 21) & 0x1f) {
17029 /* rotr is decoded as srl on non-R2 CPUs */
17030 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17035 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17038 generate_exception_end(ctx
, EXCP_RI
);
17042 case OPC_ADD
... OPC_SUBU
:
17043 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17045 case OPC_SLLV
: /* Shifts */
17047 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17050 switch ((ctx
->opcode
>> 6) & 0x1f) {
17052 /* rotrv is decoded as srlv on non-R2 CPUs */
17053 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17058 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17061 generate_exception_end(ctx
, EXCP_RI
);
17065 case OPC_SLT
: /* Set on less than */
17067 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17069 case OPC_AND
: /* Logic*/
17073 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17076 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17078 case OPC_TGE
... OPC_TEQ
: /* Traps */
17080 check_insn(ctx
, ISA_MIPS2
);
17081 gen_trap(ctx
, op1
, rs
, rt
, -1);
17083 case OPC_LSA
: /* OPC_PMON */
17084 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17085 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17086 decode_opc_special_r6(env
, ctx
);
17088 /* Pmon entry point, also R4010 selsl */
17089 #ifdef MIPS_STRICT_STANDARD
17090 MIPS_INVAL("PMON / selsl");
17091 generate_exception_end(ctx
, EXCP_RI
);
17093 gen_helper_0e0i(pmon
, sa
);
17098 generate_exception_end(ctx
, EXCP_SYSCALL
);
17101 generate_exception_end(ctx
, EXCP_BREAK
);
17104 check_insn(ctx
, ISA_MIPS2
);
17105 /* Treat as NOP. */
17108 #if defined(TARGET_MIPS64)
17109 /* MIPS64 specific opcodes */
17114 check_insn(ctx
, ISA_MIPS3
);
17115 check_mips_64(ctx
);
17116 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17119 switch ((ctx
->opcode
>> 21) & 0x1f) {
17121 /* drotr is decoded as dsrl on non-R2 CPUs */
17122 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17127 check_insn(ctx
, ISA_MIPS3
);
17128 check_mips_64(ctx
);
17129 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17132 generate_exception_end(ctx
, EXCP_RI
);
17137 switch ((ctx
->opcode
>> 21) & 0x1f) {
17139 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17140 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17145 check_insn(ctx
, ISA_MIPS3
);
17146 check_mips_64(ctx
);
17147 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17150 generate_exception_end(ctx
, EXCP_RI
);
17154 case OPC_DADD
... OPC_DSUBU
:
17155 check_insn(ctx
, ISA_MIPS3
);
17156 check_mips_64(ctx
);
17157 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17161 check_insn(ctx
, ISA_MIPS3
);
17162 check_mips_64(ctx
);
17163 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17166 switch ((ctx
->opcode
>> 6) & 0x1f) {
17168 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17169 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17174 check_insn(ctx
, ISA_MIPS3
);
17175 check_mips_64(ctx
);
17176 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17179 generate_exception_end(ctx
, EXCP_RI
);
17184 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17185 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17186 decode_opc_special_r6(env
, ctx
);
17191 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17192 decode_opc_special_r6(env
, ctx
);
17194 decode_opc_special_legacy(env
, ctx
);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case R6_OPC_PREF:
        if (rt >= 24) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        }
        /* Treat as NOP. */
        break;
    case R6_OPC_CACHE:
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        break;
    case R6_OPC_SC:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LL:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_BSHFL:
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_BSHFL(ctx->opcode);
            switch (op2) {
            case OPC_ALIGN ... OPC_ALIGN_END:
                gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
                break;
            case OPC_BITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case R6_OPC_SCD:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LLD:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_DBSHFL:
        check_mips_64(ctx);
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_DBSHFL(ctx->opcode);
            switch (op2) {
            case OPC_DALIGN ... OPC_DALIGN_END:
                gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
                break;
            case OPC_DBITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}

17353 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17358 rs
= (ctx
->opcode
>> 21) & 0x1f;
17359 rt
= (ctx
->opcode
>> 16) & 0x1f;
17360 rd
= (ctx
->opcode
>> 11) & 0x1f;
17362 op1
= MASK_SPECIAL3(ctx
->opcode
);
17364 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17365 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17366 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17367 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17368 * the same mask and op1. */
17369 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17370 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17373 case OPC_ADDUH_R_QB
:
17375 case OPC_ADDQH_R_PH
:
17377 case OPC_ADDQH_R_W
:
17379 case OPC_SUBUH_R_QB
:
17381 case OPC_SUBQH_R_PH
:
17383 case OPC_SUBQH_R_W
:
17384 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17389 case OPC_MULQ_RS_W
:
17390 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17393 MIPS_INVAL("MASK ADDUH.QB");
17394 generate_exception_end(ctx
, EXCP_RI
);
17397 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17398 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17400 generate_exception_end(ctx
, EXCP_RI
);
17404 op2
= MASK_LX(ctx
->opcode
);
17406 #if defined(TARGET_MIPS64)
17412 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17414 default: /* Invalid */
17415 MIPS_INVAL("MASK LX");
17416 generate_exception_end(ctx
, EXCP_RI
);
17420 case OPC_ABSQ_S_PH_DSP
:
17421 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17423 case OPC_ABSQ_S_QB
:
17424 case OPC_ABSQ_S_PH
:
17426 case OPC_PRECEQ_W_PHL
:
17427 case OPC_PRECEQ_W_PHR
:
17428 case OPC_PRECEQU_PH_QBL
:
17429 case OPC_PRECEQU_PH_QBR
:
17430 case OPC_PRECEQU_PH_QBLA
:
17431 case OPC_PRECEQU_PH_QBRA
:
17432 case OPC_PRECEU_PH_QBL
:
17433 case OPC_PRECEU_PH_QBR
:
17434 case OPC_PRECEU_PH_QBLA
:
17435 case OPC_PRECEU_PH_QBRA
:
17436 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17443 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17446 MIPS_INVAL("MASK ABSQ_S.PH");
17447 generate_exception_end(ctx
, EXCP_RI
);
17451 case OPC_ADDU_QB_DSP
:
17452 op2
= MASK_ADDU_QB(ctx
->opcode
);
17455 case OPC_ADDQ_S_PH
:
17458 case OPC_ADDU_S_QB
:
17460 case OPC_ADDU_S_PH
:
17462 case OPC_SUBQ_S_PH
:
17465 case OPC_SUBU_S_QB
:
17467 case OPC_SUBU_S_PH
:
17471 case OPC_RADDU_W_QB
:
17472 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17474 case OPC_MULEU_S_PH_QBL
:
17475 case OPC_MULEU_S_PH_QBR
:
17476 case OPC_MULQ_RS_PH
:
17477 case OPC_MULEQ_S_W_PHL
:
17478 case OPC_MULEQ_S_W_PHR
:
17479 case OPC_MULQ_S_PH
:
17480 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17482 default: /* Invalid */
17483 MIPS_INVAL("MASK ADDU.QB");
17484 generate_exception_end(ctx
, EXCP_RI
);
17489 case OPC_CMPU_EQ_QB_DSP
:
17490 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17492 case OPC_PRECR_SRA_PH_W
:
17493 case OPC_PRECR_SRA_R_PH_W
:
17494 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17496 case OPC_PRECR_QB_PH
:
17497 case OPC_PRECRQ_QB_PH
:
17498 case OPC_PRECRQ_PH_W
:
17499 case OPC_PRECRQ_RS_PH_W
:
17500 case OPC_PRECRQU_S_QB_PH
:
17501 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17503 case OPC_CMPU_EQ_QB
:
17504 case OPC_CMPU_LT_QB
:
17505 case OPC_CMPU_LE_QB
:
17506 case OPC_CMP_EQ_PH
:
17507 case OPC_CMP_LT_PH
:
17508 case OPC_CMP_LE_PH
:
17509 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17511 case OPC_CMPGU_EQ_QB
:
17512 case OPC_CMPGU_LT_QB
:
17513 case OPC_CMPGU_LE_QB
:
17514 case OPC_CMPGDU_EQ_QB
:
17515 case OPC_CMPGDU_LT_QB
:
17516 case OPC_CMPGDU_LE_QB
:
17519 case OPC_PACKRL_PH
:
17520 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17522 default: /* Invalid */
17523 MIPS_INVAL("MASK CMPU.EQ.QB");
17524 generate_exception_end(ctx
, EXCP_RI
);
17528 case OPC_SHLL_QB_DSP
:
17529 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17531 case OPC_DPA_W_PH_DSP
:
17532 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17534 case OPC_DPAU_H_QBL
:
17535 case OPC_DPAU_H_QBR
:
17536 case OPC_DPSU_H_QBL
:
17537 case OPC_DPSU_H_QBR
:
17539 case OPC_DPAX_W_PH
:
17540 case OPC_DPAQ_S_W_PH
:
17541 case OPC_DPAQX_S_W_PH
:
17542 case OPC_DPAQX_SA_W_PH
:
17544 case OPC_DPSX_W_PH
:
17545 case OPC_DPSQ_S_W_PH
:
17546 case OPC_DPSQX_S_W_PH
:
17547 case OPC_DPSQX_SA_W_PH
:
17548 case OPC_MULSAQ_S_W_PH
:
17549 case OPC_DPAQ_SA_L_W
:
17550 case OPC_DPSQ_SA_L_W
:
17551 case OPC_MAQ_S_W_PHL
:
17552 case OPC_MAQ_S_W_PHR
:
17553 case OPC_MAQ_SA_W_PHL
:
17554 case OPC_MAQ_SA_W_PHR
:
17555 case OPC_MULSA_W_PH
:
17556 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17558 default: /* Invalid */
17559 MIPS_INVAL("MASK DPAW.PH");
17560 generate_exception_end(ctx
, EXCP_RI
);
17565 op2
= MASK_INSV(ctx
->opcode
);
17576 t0
= tcg_temp_new();
17577 t1
= tcg_temp_new();
17579 gen_load_gpr(t0
, rt
);
17580 gen_load_gpr(t1
, rs
);
17582 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17588 default: /* Invalid */
17589 MIPS_INVAL("MASK INSV");
17590 generate_exception_end(ctx
, EXCP_RI
);
17594 case OPC_APPEND_DSP
:
17595 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17597 case OPC_EXTR_W_DSP
:
17598 op2
= MASK_EXTR_W(ctx
->opcode
);
17602 case OPC_EXTR_RS_W
:
17604 case OPC_EXTRV_S_H
:
17606 case OPC_EXTRV_R_W
:
17607 case OPC_EXTRV_RS_W
:
17612 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17615 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17621 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17623 default: /* Invalid */
17624 MIPS_INVAL("MASK EXTR.W");
17625 generate_exception_end(ctx
, EXCP_RI
);
17629 #if defined(TARGET_MIPS64)
17630 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17631 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17632 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17633 check_insn(ctx
, INSN_LOONGSON2E
);
17634 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17636 case OPC_ABSQ_S_QH_DSP
:
17637 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17639 case OPC_PRECEQ_L_PWL
:
17640 case OPC_PRECEQ_L_PWR
:
17641 case OPC_PRECEQ_PW_QHL
:
17642 case OPC_PRECEQ_PW_QHR
:
17643 case OPC_PRECEQ_PW_QHLA
:
17644 case OPC_PRECEQ_PW_QHRA
:
17645 case OPC_PRECEQU_QH_OBL
:
17646 case OPC_PRECEQU_QH_OBR
:
17647 case OPC_PRECEQU_QH_OBLA
:
17648 case OPC_PRECEQU_QH_OBRA
:
17649 case OPC_PRECEU_QH_OBL
:
17650 case OPC_PRECEU_QH_OBR
:
17651 case OPC_PRECEU_QH_OBLA
:
17652 case OPC_PRECEU_QH_OBRA
:
17653 case OPC_ABSQ_S_OB
:
17654 case OPC_ABSQ_S_PW
:
17655 case OPC_ABSQ_S_QH
:
17656 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17664 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17666 default: /* Invalid */
17667 MIPS_INVAL("MASK ABSQ_S.QH");
17668 generate_exception_end(ctx
, EXCP_RI
);
17672 case OPC_ADDU_OB_DSP
:
17673 op2
= MASK_ADDU_OB(ctx
->opcode
);
17675 case OPC_RADDU_L_OB
:
17677 case OPC_SUBQ_S_PW
:
17679 case OPC_SUBQ_S_QH
:
17681 case OPC_SUBU_S_OB
:
17683 case OPC_SUBU_S_QH
:
17685 case OPC_SUBUH_R_OB
:
17687 case OPC_ADDQ_S_PW
:
17689 case OPC_ADDQ_S_QH
:
17691 case OPC_ADDU_S_OB
:
17693 case OPC_ADDU_S_QH
:
17695 case OPC_ADDUH_R_OB
:
17696 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17698 case OPC_MULEQ_S_PW_QHL
:
17699 case OPC_MULEQ_S_PW_QHR
:
17700 case OPC_MULEU_S_QH_OBL
:
17701 case OPC_MULEU_S_QH_OBR
:
17702 case OPC_MULQ_RS_QH
:
17703 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17705 default: /* Invalid */
17706 MIPS_INVAL("MASK ADDU.OB");
17707 generate_exception_end(ctx
, EXCP_RI
);
17711 case OPC_CMPU_EQ_OB_DSP
:
17712 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17714 case OPC_PRECR_SRA_QH_PW
:
17715 case OPC_PRECR_SRA_R_QH_PW
:
17716 /* Return value is rt. */
17717 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17719 case OPC_PRECR_OB_QH
:
17720 case OPC_PRECRQ_OB_QH
:
17721 case OPC_PRECRQ_PW_L
:
17722 case OPC_PRECRQ_QH_PW
:
17723 case OPC_PRECRQ_RS_QH_PW
:
17724 case OPC_PRECRQU_S_OB_QH
:
17725 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17727 case OPC_CMPU_EQ_OB
:
17728 case OPC_CMPU_LT_OB
:
17729 case OPC_CMPU_LE_OB
:
17730 case OPC_CMP_EQ_QH
:
17731 case OPC_CMP_LT_QH
:
17732 case OPC_CMP_LE_QH
:
17733 case OPC_CMP_EQ_PW
:
17734 case OPC_CMP_LT_PW
:
17735 case OPC_CMP_LE_PW
:
17736 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17738 case OPC_CMPGDU_EQ_OB
:
17739 case OPC_CMPGDU_LT_OB
:
17740 case OPC_CMPGDU_LE_OB
:
17741 case OPC_CMPGU_EQ_OB
:
17742 case OPC_CMPGU_LT_OB
:
17743 case OPC_CMPGU_LE_OB
:
17744 case OPC_PACKRL_PW
:
17748 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17750 default: /* Invalid */
17751 MIPS_INVAL("MASK CMPU_EQ.OB");
17752 generate_exception_end(ctx
, EXCP_RI
);
17756 case OPC_DAPPEND_DSP
:
17757 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17759 case OPC_DEXTR_W_DSP
:
17760 op2
= MASK_DEXTR_W(ctx
->opcode
);
17767 case OPC_DEXTR_R_L
:
17768 case OPC_DEXTR_RS_L
:
17770 case OPC_DEXTR_R_W
:
17771 case OPC_DEXTR_RS_W
:
17772 case OPC_DEXTR_S_H
:
17774 case OPC_DEXTRV_R_L
:
17775 case OPC_DEXTRV_RS_L
:
17776 case OPC_DEXTRV_S_H
:
17778 case OPC_DEXTRV_R_W
:
17779 case OPC_DEXTRV_RS_W
:
17780 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17785 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17787 default: /* Invalid */
17788 MIPS_INVAL("MASK EXTR.W");
17789 generate_exception_end(ctx
, EXCP_RI
);
17793 case OPC_DPAQ_W_QH_DSP
:
17794 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17796 case OPC_DPAU_H_OBL
:
17797 case OPC_DPAU_H_OBR
:
17798 case OPC_DPSU_H_OBL
:
17799 case OPC_DPSU_H_OBR
:
17801 case OPC_DPAQ_S_W_QH
:
17803 case OPC_DPSQ_S_W_QH
:
17804 case OPC_MULSAQ_S_W_QH
:
17805 case OPC_DPAQ_SA_L_PW
:
17806 case OPC_DPSQ_SA_L_PW
:
17807 case OPC_MULSAQ_S_L_PW
:
17808 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17810 case OPC_MAQ_S_W_QHLL
:
17811 case OPC_MAQ_S_W_QHLR
:
17812 case OPC_MAQ_S_W_QHRL
:
17813 case OPC_MAQ_S_W_QHRR
:
17814 case OPC_MAQ_SA_W_QHLL
:
17815 case OPC_MAQ_SA_W_QHLR
:
17816 case OPC_MAQ_SA_W_QHRL
:
17817 case OPC_MAQ_SA_W_QHRR
:
17818 case OPC_MAQ_S_L_PWL
:
17819 case OPC_MAQ_S_L_PWR
:
17824 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17826 default: /* Invalid */
17827 MIPS_INVAL("MASK DPAQ.W.QH");
17828 generate_exception_end(ctx
, EXCP_RI
);
17832 case OPC_DINSV_DSP
:
17833 op2
= MASK_INSV(ctx
->opcode
);
17844 t0
= tcg_temp_new();
17845 t1
= tcg_temp_new();
17847 gen_load_gpr(t0
, rt
);
17848 gen_load_gpr(t1
, rs
);
17850 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17856 default: /* Invalid */
17857 MIPS_INVAL("MASK DINSV");
17858 generate_exception_end(ctx
, EXCP_RI
);
17862 case OPC_SHLL_OB_DSP
:
17863 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17866 default: /* Invalid */
17867 MIPS_INVAL("special3_legacy");
17868 generate_exception_end(ctx
, EXCP_RI
);
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case OPC_EXT:
    case OPC_INS:
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_BSHFL:
        op2 = MASK_BSHFL(ctx->opcode);
        switch (op2) {
        case OPC_ALIGN ... OPC_ALIGN_END:
        case OPC_BITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        break;
    case OPC_DBSHFL:
        op2 = MASK_DBSHFL(ctx->opcode);
        switch (op2) {
        case OPC_DALIGN ... OPC_DALIGN_END:
        case OPC_DBITSWAP:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            break;
        default:
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
            break;
        }
        break;
#endif
    case OPC_RDHWR:
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        break;
    case OPC_FORK:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;
    case OPC_YIELD:
        check_insn(ctx, ASE_MT);
        {
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, cpu_env, t0);
            gen_store_gpr(t0, rd);
            tcg_temp_free(t0);
        }
        break;
    default:
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
        } else {
            decode_opc_special3_legacy(env, ctx);
        }
    }
}

/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}

static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    /* For each 64-bit half x of vector register wt this computes
       (x - eval_zero_or_big) & ~x & eval_big, which is non-zero exactly
       when at least one df-sized element of x is zero.  */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}

18071 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18073 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18074 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18075 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18076 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18078 TCGv_i32 twd
= tcg_const_i32(wd
);
18079 TCGv_i32 tws
= tcg_const_i32(ws
);
18080 TCGv_i32 ti8
= tcg_const_i32(i8
);
18082 switch (MASK_MSA_I8(ctx
->opcode
)) {
18084 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18087 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18090 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18093 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18096 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18099 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18102 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18108 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18109 if (df
== DF_DOUBLE
) {
18110 generate_exception_end(ctx
, EXCP_RI
);
18112 TCGv_i32 tdf
= tcg_const_i32(df
);
18113 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18114 tcg_temp_free_i32(tdf
);
18119 MIPS_INVAL("MSA instruction");
18120 generate_exception_end(ctx
, EXCP_RI
);
18124 tcg_temp_free_i32(twd
);
18125 tcg_temp_free_i32(tws
);
18126 tcg_temp_free_i32(ti8
);
18129 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18131 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18132 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18133 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18134 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18135 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18136 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18138 TCGv_i32 tdf
= tcg_const_i32(df
);
18139 TCGv_i32 twd
= tcg_const_i32(wd
);
18140 TCGv_i32 tws
= tcg_const_i32(ws
);
18141 TCGv_i32 timm
= tcg_temp_new_i32();
18142 tcg_gen_movi_i32(timm
, u5
);
18144 switch (MASK_MSA_I5(ctx
->opcode
)) {
18146 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18149 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18151 case OPC_MAXI_S_df
:
18152 tcg_gen_movi_i32(timm
, s5
);
18153 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18155 case OPC_MAXI_U_df
:
18156 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18158 case OPC_MINI_S_df
:
18159 tcg_gen_movi_i32(timm
, s5
);
18160 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18162 case OPC_MINI_U_df
:
18163 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18166 tcg_gen_movi_i32(timm
, s5
);
18167 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18169 case OPC_CLTI_S_df
:
18170 tcg_gen_movi_i32(timm
, s5
);
18171 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18173 case OPC_CLTI_U_df
:
18174 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18176 case OPC_CLEI_S_df
:
18177 tcg_gen_movi_i32(timm
, s5
);
18178 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18180 case OPC_CLEI_U_df
:
18181 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18185 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18186 tcg_gen_movi_i32(timm
, s10
);
18187 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18191 MIPS_INVAL("MSA instruction");
18192 generate_exception_end(ctx
, EXCP_RI
);
18196 tcg_temp_free_i32(tdf
);
18197 tcg_temp_free_i32(twd
);
18198 tcg_temp_free_i32(tws
);
18199 tcg_temp_free_i32(timm
);
18202 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18204 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18205 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18206 uint32_t df
= 0, m
= 0;
18207 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18208 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18215 if ((dfm
& 0x40) == 0x00) {
18218 } else if ((dfm
& 0x60) == 0x40) {
18221 } else if ((dfm
& 0x70) == 0x60) {
18224 } else if ((dfm
& 0x78) == 0x70) {
18228 generate_exception_end(ctx
, EXCP_RI
);
18232 tdf
= tcg_const_i32(df
);
18233 tm
= tcg_const_i32(m
);
18234 twd
= tcg_const_i32(wd
);
18235 tws
= tcg_const_i32(ws
);
18237 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18239 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18242 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18245 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18248 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18251 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18254 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18256 case OPC_BINSLI_df
:
18257 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18259 case OPC_BINSRI_df
:
18260 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18263 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18266 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18269 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18272 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18275 MIPS_INVAL("MSA instruction");
18276 generate_exception_end(ctx
, EXCP_RI
);
18280 tcg_temp_free_i32(tdf
);
18281 tcg_temp_free_i32(tm
);
18282 tcg_temp_free_i32(twd
);
18283 tcg_temp_free_i32(tws
);
18286 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18288 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18289 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18290 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18291 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18292 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18294 TCGv_i32 tdf
= tcg_const_i32(df
);
18295 TCGv_i32 twd
= tcg_const_i32(wd
);
18296 TCGv_i32 tws
= tcg_const_i32(ws
);
18297 TCGv_i32 twt
= tcg_const_i32(wt
);
18299 switch (MASK_MSA_3R(ctx
->opcode
)) {
18301 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18304 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18307 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18310 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18312 case OPC_SUBS_S_df
:
18313 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18316 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18319 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18322 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18325 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18328 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18330 case OPC_ADDS_A_df
:
18331 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18333 case OPC_SUBS_U_df
:
18334 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18337 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18340 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18343 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18346 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18349 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18352 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18354 case OPC_ADDS_S_df
:
18355 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18357 case OPC_SUBSUS_U_df
:
18358 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18361 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18364 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18367 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18370 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18373 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18376 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18378 case OPC_ADDS_U_df
:
18379 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18381 case OPC_SUBSUU_S_df
:
18382 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18385 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18388 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18391 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18394 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18397 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18399 case OPC_ASUB_S_df
:
18400 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18403 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18406 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18409 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18412 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18415 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18418 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18420 case OPC_ASUB_U_df
:
18421 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18424 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18427 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18430 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18433 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18435 case OPC_AVER_S_df
:
18436 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18439 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18442 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18445 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18448 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18450 case OPC_AVER_U_df
:
18451 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18454 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18457 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18460 case OPC_DOTP_S_df
:
18461 case OPC_DOTP_U_df
:
18462 case OPC_DPADD_S_df
:
18463 case OPC_DPADD_U_df
:
18464 case OPC_DPSUB_S_df
:
18465 case OPC_HADD_S_df
:
18466 case OPC_DPSUB_U_df
:
18467 case OPC_HADD_U_df
:
18468 case OPC_HSUB_S_df
:
18469 case OPC_HSUB_U_df
:
18470 if (df
== DF_BYTE
) {
18471 generate_exception_end(ctx
, EXCP_RI
);
18474 switch (MASK_MSA_3R(ctx
->opcode
)) {
18475 case OPC_DOTP_S_df
:
18476 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18478 case OPC_DOTP_U_df
:
18479 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18481 case OPC_DPADD_S_df
:
18482 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18484 case OPC_DPADD_U_df
:
18485 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18487 case OPC_DPSUB_S_df
:
18488 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18490 case OPC_HADD_S_df
:
18491 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18493 case OPC_DPSUB_U_df
:
18494 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18496 case OPC_HADD_U_df
:
18497 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18499 case OPC_HSUB_S_df
:
18500 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18502 case OPC_HSUB_U_df
:
18503 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18508 MIPS_INVAL("MSA instruction");
18509 generate_exception_end(ctx
, EXCP_RI
);
18512 tcg_temp_free_i32(twd
);
18513 tcg_temp_free_i32(tws
);
18514 tcg_temp_free_i32(twt
);
18515 tcg_temp_free_i32(tdf
);
18518 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18520 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18521 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18522 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18523 TCGv telm
= tcg_temp_new();
18524 TCGv_i32 tsr
= tcg_const_i32(source
);
18525 TCGv_i32 tdt
= tcg_const_i32(dest
);
18527 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18529 gen_load_gpr(telm
, source
);
18530 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18533 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18534 gen_store_gpr(telm
, dest
);
18537 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18540 MIPS_INVAL("MSA instruction");
18541 generate_exception_end(ctx
, EXCP_RI
);
18545 tcg_temp_free(telm
);
18546 tcg_temp_free_i32(tdt
);
18547 tcg_temp_free_i32(tsr
);
18550 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18553 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18554 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18555 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18557 TCGv_i32 tws
= tcg_const_i32(ws
);
18558 TCGv_i32 twd
= tcg_const_i32(wd
);
18559 TCGv_i32 tn
= tcg_const_i32(n
);
18560 TCGv_i32 tdf
= tcg_const_i32(df
);
18562 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18564 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18566 case OPC_SPLATI_df
:
18567 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18570 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18572 case OPC_COPY_S_df
:
18573 case OPC_COPY_U_df
:
18574 case OPC_INSERT_df
:
18575 #if !defined(TARGET_MIPS64)
18576 /* Double format valid only for MIPS64 */
18577 if (df
== DF_DOUBLE
) {
18578 generate_exception_end(ctx
, EXCP_RI
);
18582 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18583 case OPC_COPY_S_df
:
18584 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18586 case OPC_COPY_U_df
:
18587 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18589 case OPC_INSERT_df
:
18590 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18595 MIPS_INVAL("MSA instruction");
18596 generate_exception_end(ctx
, EXCP_RI
);
18598 tcg_temp_free_i32(twd
);
18599 tcg_temp_free_i32(tws
);
18600 tcg_temp_free_i32(tn
);
18601 tcg_temp_free_i32(tdf
);
18604 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18606 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18607 uint32_t df
= 0, n
= 0;
18609 if ((dfn
& 0x30) == 0x00) {
18612 } else if ((dfn
& 0x38) == 0x20) {
18615 } else if ((dfn
& 0x3c) == 0x30) {
18618 } else if ((dfn
& 0x3e) == 0x38) {
18621 } else if (dfn
== 0x3E) {
18622 /* CTCMSA, CFCMSA, MOVE.V */
18623 gen_msa_elm_3e(env
, ctx
);
18626 generate_exception_end(ctx
, EXCP_RI
);
18630 gen_msa_elm_df(env
, ctx
, df
, n
);
18633 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18635 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18636 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18637 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18638 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18639 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18641 TCGv_i32 twd
= tcg_const_i32(wd
);
18642 TCGv_i32 tws
= tcg_const_i32(ws
);
18643 TCGv_i32 twt
= tcg_const_i32(wt
);
18644 TCGv_i32 tdf
= tcg_temp_new_i32();
18646 /* adjust df value for floating-point instruction */
18647 tcg_gen_movi_i32(tdf
, df
+ 2);
18649 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18651 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18654 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18657 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18660 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18663 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18666 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18669 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18672 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18675 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18678 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18681 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18684 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18687 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18690 tcg_gen_movi_i32(tdf
, df
+ 1);
18691 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18694 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18697 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18699 case OPC_MADD_Q_df
:
18700 tcg_gen_movi_i32(tdf
, df
+ 1);
18701 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18704 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18706 case OPC_MSUB_Q_df
:
18707 tcg_gen_movi_i32(tdf
, df
+ 1);
18708 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18711 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18714 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18717 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18720 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18723 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18726 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18729 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18732 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18735 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18738 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18741 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18744 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18747 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18749 case OPC_MULR_Q_df
:
18750 tcg_gen_movi_i32(tdf
, df
+ 1);
18751 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18754 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18756 case OPC_FMIN_A_df
:
18757 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18759 case OPC_MADDR_Q_df
:
18760 tcg_gen_movi_i32(tdf
, df
+ 1);
18761 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18764 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18767 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18769 case OPC_MSUBR_Q_df
:
18770 tcg_gen_movi_i32(tdf
, df
+ 1);
18771 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18774 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18776 case OPC_FMAX_A_df
:
18777 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18780 MIPS_INVAL("MSA instruction");
18781 generate_exception_end(ctx
, EXCP_RI
);
18785 tcg_temp_free_i32(twd
);
18786 tcg_temp_free_i32(tws
);
18787 tcg_temp_free_i32(twt
);
18788 tcg_temp_free_i32(tdf
);
18791 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18793 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18794 (op & (0x7 << 18)))
18795 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18796 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18797 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18798 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18799 TCGv_i32 twd
= tcg_const_i32(wd
);
18800 TCGv_i32 tws
= tcg_const_i32(ws
);
18801 TCGv_i32 twt
= tcg_const_i32(wt
);
18802 TCGv_i32 tdf
= tcg_const_i32(df
);
18804 switch (MASK_MSA_2R(ctx
->opcode
)) {
18806 #if !defined(TARGET_MIPS64)
18807 /* Double format valid only for MIPS64 */
18808 if (df
== DF_DOUBLE
) {
18809 generate_exception_end(ctx
, EXCP_RI
);
18813 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18816 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18819 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18822 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18825 MIPS_INVAL("MSA instruction");
18826 generate_exception_end(ctx
, EXCP_RI
);
18830 tcg_temp_free_i32(twd
);
18831 tcg_temp_free_i32(tws
);
18832 tcg_temp_free_i32(twt
);
18833 tcg_temp_free_i32(tdf
);
18836 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18838 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18839 (op & (0xf << 17)))
18840 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18841 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18842 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18843 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18844 TCGv_i32 twd
= tcg_const_i32(wd
);
18845 TCGv_i32 tws
= tcg_const_i32(ws
);
18846 TCGv_i32 twt
= tcg_const_i32(wt
);
18847 /* adjust df value for floating-point instruction */
18848 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18850 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18851 case OPC_FCLASS_df
:
18852 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18854 case OPC_FTRUNC_S_df
:
18855 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18857 case OPC_FTRUNC_U_df
:
18858 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18861 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18863 case OPC_FRSQRT_df
:
18864 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18867 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18870 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18873 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18875 case OPC_FEXUPL_df
:
18876 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18878 case OPC_FEXUPR_df
:
18879 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18882 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18885 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18887 case OPC_FTINT_S_df
:
18888 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18890 case OPC_FTINT_U_df
:
18891 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18893 case OPC_FFINT_S_df
:
18894 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18896 case OPC_FFINT_U_df
:
18897 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18901 tcg_temp_free_i32(twd
);
18902 tcg_temp_free_i32(tws
);
18903 tcg_temp_free_i32(twt
);
18904 tcg_temp_free_i32(tdf
);
18907 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18909 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18910 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18911 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18912 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18913 TCGv_i32 twd
= tcg_const_i32(wd
);
18914 TCGv_i32 tws
= tcg_const_i32(ws
);
18915 TCGv_i32 twt
= tcg_const_i32(wt
);
18917 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18919 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18922 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18925 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18928 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18931 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18934 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18937 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18940 MIPS_INVAL("MSA instruction");
18941 generate_exception_end(ctx
, EXCP_RI
);
18945 tcg_temp_free_i32(twd
);
18946 tcg_temp_free_i32(tws
);
18947 tcg_temp_free_i32(twt
);
18950 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18952 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18960 gen_msa_vec_v(env
, ctx
);
18963 gen_msa_2r(env
, ctx
);
18966 gen_msa_2rf(env
, ctx
);
18969 MIPS_INVAL("MSA instruction");
18970 generate_exception_end(ctx
, EXCP_RI
);
18975 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18977 uint32_t opcode
= ctx
->opcode
;
18978 check_insn(ctx
, ASE_MSA
);
18979 check_msa_access(ctx
);
18981 switch (MASK_MSA_MINOR(opcode
)) {
18982 case OPC_MSA_I8_00
:
18983 case OPC_MSA_I8_01
:
18984 case OPC_MSA_I8_02
:
18985 gen_msa_i8(env
, ctx
);
18987 case OPC_MSA_I5_06
:
18988 case OPC_MSA_I5_07
:
18989 gen_msa_i5(env
, ctx
);
18991 case OPC_MSA_BIT_09
:
18992 case OPC_MSA_BIT_0A
:
18993 gen_msa_bit(env
, ctx
);
18995 case OPC_MSA_3R_0D
:
18996 case OPC_MSA_3R_0E
:
18997 case OPC_MSA_3R_0F
:
18998 case OPC_MSA_3R_10
:
18999 case OPC_MSA_3R_11
:
19000 case OPC_MSA_3R_12
:
19001 case OPC_MSA_3R_13
:
19002 case OPC_MSA_3R_14
:
19003 case OPC_MSA_3R_15
:
19004 gen_msa_3r(env
, ctx
);
19007 gen_msa_elm(env
, ctx
);
19009 case OPC_MSA_3RF_1A
:
19010 case OPC_MSA_3RF_1B
:
19011 case OPC_MSA_3RF_1C
:
19012 gen_msa_3rf(env
, ctx
);
19015 gen_msa_vec(env
, ctx
);
19026 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19027 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19028 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19029 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19031 TCGv_i32 twd
= tcg_const_i32(wd
);
19032 TCGv taddr
= tcg_temp_new();
19033 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19035 switch (MASK_MSA_MINOR(opcode
)) {
19037 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19040 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19043 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19046 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19049 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19052 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19055 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19058 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19062 tcg_temp_free_i32(twd
);
19063 tcg_temp_free(taddr
);
19067 MIPS_INVAL("MSA instruction");
19068 generate_exception_end(ctx
, EXCP_RI
);
19074 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19077 int rs
, rt
, rd
, sa
;
19081 /* make sure instructions are on a word boundary */
19082 if (ctx
->pc
& 0x3) {
19083 env
->CP0_BadVAddr
= ctx
->pc
;
19084 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19088 /* Handle blikely not taken case */
19089 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19090 TCGLabel
*l1
= gen_new_label();
19092 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19093 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19094 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19098 op
= MASK_OP_MAJOR(ctx
->opcode
);
19099 rs
= (ctx
->opcode
>> 21) & 0x1f;
19100 rt
= (ctx
->opcode
>> 16) & 0x1f;
19101 rd
= (ctx
->opcode
>> 11) & 0x1f;
19102 sa
= (ctx
->opcode
>> 6) & 0x1f;
19103 imm
= (int16_t)ctx
->opcode
;
19106 decode_opc_special(env
, ctx
);
19109 decode_opc_special2_legacy(env
, ctx
);
19112 decode_opc_special3(env
, ctx
);
        op1 = MASK_REGIMM(ctx->opcode);
        case OPC_BLTZL: /* REGIMM branches */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                    generate_exception_end(ctx, EXCP_RI);
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions */
            ctx->bstate = BS_STOP;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
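    /* Coprocessor 0 (system control) instructions; these are only legal
       when CP0 access is enabled for the current mode. */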
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            TCGv t0 = tcg_temp_new();

            op2 = MASK_MFMC0(ctx->opcode);
                check_insn(ctx, ASE_MT);
                gen_helper_dmt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_emt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_dvpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_evpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_dvp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_evp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_di(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->bstate = BS_STOP;
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_ei(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->bstate = BS_STOP;
            default: /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception_end(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            generate_exception_end(ctx, EXCP_RI);
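    /* On R6 several compact-branch encodings reuse pre-R6 major opcodes,
       so the interpretation of the cases below depends on ctx->insn_flags. */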
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                           rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
        check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start;
    target_ulong next_page_start;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
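    /* Main translation loop: translate one guest instruction per iteration
       until the TB must be closed (branch taken, page boundary crossed,
       op buffer full or instruction limit reached). */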
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            goto done_generating;

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
        if (ctx.pc >= next_page_start) {
        if (tcg_op_buf_full()) {
        if (num_insns >= max_insns)
        if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx.bstate) {
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp) \
    fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                " fd:%13g fs:%13g psu: %13g\n", \
                (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
                (double)(fp)->fd, \
                (double)(fp)->fs[FP_ENDIAN_IDX], \
                (double)(fp)->fs[!FP_ENDIAN_IDX]); \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu:%13g\n", \
                    tmp.w[FP_ENDIAN_IDX], tmp.d, \
                    (double)tmp.fs[FP_ENDIAN_IDX], \
                    (double)tmp.fs[!FP_ENDIAN_IDX]); \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

bool cpu_supports_cps_smp(const char *cpu_model)
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= 0x80000000;
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    cpu_mips_store_count(env, 1);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];