2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
25 #include "disas/disas.h"
27 #include "exec/cpu_ldst.h"
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 #include "sysemu/kvm.h"
32 #include "exec/semihost.h"
34 #include "trace-tcg.h"
/* Debug switch for the disassembly tracing below; 0 disables it.
   NOTE(review): presumably flipped to 1 locally when debugging the
   translator — confirm against its use sites further down the file. */
#define MIPS_DEBUG_DISAS 0
38 /* MIPS major opcodes */
/* Isolate the major opcode: the top six bits (31..26) of an instruction
   word.  The argument is parenthesized so that expressions such as
   MASK_OP_MAJOR(a | b) expand with the intended precedence. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
42 /* indirect opcode tables */
43 OPC_SPECIAL
= (0x00 << 26),
44 OPC_REGIMM
= (0x01 << 26),
45 OPC_CP0
= (0x10 << 26),
46 OPC_CP1
= (0x11 << 26),
47 OPC_CP2
= (0x12 << 26),
48 OPC_CP3
= (0x13 << 26),
49 OPC_SPECIAL2
= (0x1C << 26),
50 OPC_SPECIAL3
= (0x1F << 26),
51 /* arithmetic with immediate */
52 OPC_ADDI
= (0x08 << 26),
53 OPC_ADDIU
= (0x09 << 26),
54 OPC_SLTI
= (0x0A << 26),
55 OPC_SLTIU
= (0x0B << 26),
56 /* logic with immediate */
57 OPC_ANDI
= (0x0C << 26),
58 OPC_ORI
= (0x0D << 26),
59 OPC_XORI
= (0x0E << 26),
60 OPC_LUI
= (0x0F << 26),
61 /* arithmetic with immediate */
62 OPC_DADDI
= (0x18 << 26),
63 OPC_DADDIU
= (0x19 << 26),
64 /* Jump and branches */
66 OPC_JAL
= (0x03 << 26),
67 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
68 OPC_BEQL
= (0x14 << 26),
69 OPC_BNE
= (0x05 << 26),
70 OPC_BNEL
= (0x15 << 26),
71 OPC_BLEZ
= (0x06 << 26),
72 OPC_BLEZL
= (0x16 << 26),
73 OPC_BGTZ
= (0x07 << 26),
74 OPC_BGTZL
= (0x17 << 26),
75 OPC_JALX
= (0x1D << 26),
76 OPC_DAUI
= (0x1D << 26),
78 OPC_LDL
= (0x1A << 26),
79 OPC_LDR
= (0x1B << 26),
80 OPC_LB
= (0x20 << 26),
81 OPC_LH
= (0x21 << 26),
82 OPC_LWL
= (0x22 << 26),
83 OPC_LW
= (0x23 << 26),
84 OPC_LWPC
= OPC_LW
| 0x5,
85 OPC_LBU
= (0x24 << 26),
86 OPC_LHU
= (0x25 << 26),
87 OPC_LWR
= (0x26 << 26),
88 OPC_LWU
= (0x27 << 26),
89 OPC_SB
= (0x28 << 26),
90 OPC_SH
= (0x29 << 26),
91 OPC_SWL
= (0x2A << 26),
92 OPC_SW
= (0x2B << 26),
93 OPC_SDL
= (0x2C << 26),
94 OPC_SDR
= (0x2D << 26),
95 OPC_SWR
= (0x2E << 26),
96 OPC_LL
= (0x30 << 26),
97 OPC_LLD
= (0x34 << 26),
98 OPC_LD
= (0x37 << 26),
99 OPC_LDPC
= OPC_LD
| 0x5,
100 OPC_SC
= (0x38 << 26),
101 OPC_SCD
= (0x3C << 26),
102 OPC_SD
= (0x3F << 26),
103 /* Floating point load/store */
104 OPC_LWC1
= (0x31 << 26),
105 OPC_LWC2
= (0x32 << 26),
106 OPC_LDC1
= (0x35 << 26),
107 OPC_LDC2
= (0x36 << 26),
108 OPC_SWC1
= (0x39 << 26),
109 OPC_SWC2
= (0x3A << 26),
110 OPC_SDC1
= (0x3D << 26),
111 OPC_SDC2
= (0x3E << 26),
112 /* Compact Branches */
113 OPC_BLEZALC
= (0x06 << 26),
114 OPC_BGEZALC
= (0x06 << 26),
115 OPC_BGEUC
= (0x06 << 26),
116 OPC_BGTZALC
= (0x07 << 26),
117 OPC_BLTZALC
= (0x07 << 26),
118 OPC_BLTUC
= (0x07 << 26),
119 OPC_BOVC
= (0x08 << 26),
120 OPC_BEQZALC
= (0x08 << 26),
121 OPC_BEQC
= (0x08 << 26),
122 OPC_BLEZC
= (0x16 << 26),
123 OPC_BGEZC
= (0x16 << 26),
124 OPC_BGEC
= (0x16 << 26),
125 OPC_BGTZC
= (0x17 << 26),
126 OPC_BLTZC
= (0x17 << 26),
127 OPC_BLTC
= (0x17 << 26),
128 OPC_BNVC
= (0x18 << 26),
129 OPC_BNEZALC
= (0x18 << 26),
130 OPC_BNEC
= (0x18 << 26),
131 OPC_BC
= (0x32 << 26),
132 OPC_BEQZC
= (0x36 << 26),
133 OPC_JIC
= (0x36 << 26),
134 OPC_BALC
= (0x3A << 26),
135 OPC_BNEZC
= (0x3E << 26),
136 OPC_JIALC
= (0x3E << 26),
137 /* MDMX ASE specific */
138 OPC_MDMX
= (0x1E << 26),
139 /* MSA ASE, same as MDMX */
141 /* Cache and prefetch */
142 OPC_CACHE
= (0x2F << 26),
143 OPC_PREF
= (0x33 << 26),
144 /* PC-relative address computation / loads */
145 OPC_PCREL
= (0x3B << 26),
148 /* PC-relative address computation / loads */
/* PC-relative group: major opcode plus the top two bits (20..19) of the
   sub-opcode field.  Argument parenthesized to avoid precedence surprises. */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
/* PC-relative group: major opcode plus all five bits (20..16) of the
   sub-opcode field.  Argument parenthesized to avoid precedence surprises. */
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
152 /* Instructions determined by bits 19 and 20 */
153 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
154 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
155 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
157 /* Instructions determined by bits 16 ... 20 */
158 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
159 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
162 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
165 /* MIPS special opcodes */
/* SPECIAL group: major opcode plus the function field (bits 5..0).
   The whole expansion is parenthesized — the original expanded to
   `a | b` unguarded, which mis-associates in e.g. `x & MASK_SPECIAL(op)`
   — and the argument is parenthesized as well. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
170 OPC_SLL
= 0x00 | OPC_SPECIAL
,
171 /* NOP is SLL r0, r0, 0 */
172 /* SSNOP is SLL r0, r0, 1 */
173 /* EHB is SLL r0, r0, 3 */
174 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
175 OPC_ROTR
= OPC_SRL
| (1 << 21),
176 OPC_SRA
= 0x03 | OPC_SPECIAL
,
177 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
178 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
179 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
180 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
181 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
182 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
183 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
184 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
185 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
186 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
187 OPC_DROTR
= OPC_DSRL
| (1 << 21),
188 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
189 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
190 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
191 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
192 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
193 /* Multiplication / division */
194 OPC_MULT
= 0x18 | OPC_SPECIAL
,
195 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
196 OPC_DIV
= 0x1A | OPC_SPECIAL
,
197 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
198 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
199 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
200 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
201 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
203 /* 2 registers arithmetic / logic */
204 OPC_ADD
= 0x20 | OPC_SPECIAL
,
205 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
206 OPC_SUB
= 0x22 | OPC_SPECIAL
,
207 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
208 OPC_AND
= 0x24 | OPC_SPECIAL
,
209 OPC_OR
= 0x25 | OPC_SPECIAL
,
210 OPC_XOR
= 0x26 | OPC_SPECIAL
,
211 OPC_NOR
= 0x27 | OPC_SPECIAL
,
212 OPC_SLT
= 0x2A | OPC_SPECIAL
,
213 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
214 OPC_DADD
= 0x2C | OPC_SPECIAL
,
215 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
216 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
217 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
219 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
220 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
222 OPC_TGE
= 0x30 | OPC_SPECIAL
,
223 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
224 OPC_TLT
= 0x32 | OPC_SPECIAL
,
225 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
226 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
227 OPC_TNE
= 0x36 | OPC_SPECIAL
,
228 /* HI / LO registers load & stores */
229 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
230 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
231 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
232 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
233 /* Conditional moves */
234 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
235 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
237 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
238 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
240 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
243 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
244 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
245 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
246 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
247 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
249 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
250 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
251 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
252 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
255 /* R6 Multiply and Divide instructions have the same Opcode
256 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
/* R6 Multiply and Divide instructions have the same opcode and function
   field as legacy OPC_MULT[U]/OPC_DIV[U]; additionally match bits 10..0
   to tell them apart.  Argument parenthesized for safe expansion. */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & (0x7ff)))
260 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
261 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
262 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
263 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
264 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
265 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
266 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
267 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
269 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
270 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
271 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
272 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
273 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
274 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
275 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
276 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
278 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
279 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
280 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
281 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
282 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
284 OPC_LSA
= 0x05 | OPC_SPECIAL
,
285 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
288 /* Multiplication variants of the vr54xx. */
/* VR54xx multiply variants: SPECIAL encoding plus bits 10..6.  Expansion
   and argument parenthesized (the original expanded to an unguarded
   `a | b`, mis-associating under higher-precedence operators). */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
292 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
293 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
294 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
295 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
296 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
297 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
301 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
302 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
303 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
304 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
305 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
308 /* REGIMM (rt field) opcodes */
/* REGIMM group: major opcode plus the rt field (bits 20..16).  Expansion
   and argument fully parenthesized for safe use in any context. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
312 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
313 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
314 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
315 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
316 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
317 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
318 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
319 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
320 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
321 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
322 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
323 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
324 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
325 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
326 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
328 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
329 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
332 /* Special2 opcodes */
/* SPECIAL2 group: major opcode plus the function field (bits 5..0).
   Expansion and argument fully parenthesized for safe use in any context. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
336 /* Multiply & xxx operations */
337 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
338 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
339 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
340 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
341 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
343 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
344 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
345 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
346 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
347 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
348 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
349 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
350 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
351 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
352 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
353 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
354 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
356 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
357 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
358 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
359 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
361 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
364 /* Special3 opcodes */
/* SPECIAL3 group: major opcode plus the function field (bits 5..0).
   Expansion and argument fully parenthesized for safe use in any context. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
368 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
369 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
370 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
371 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
372 OPC_INS
= 0x04 | OPC_SPECIAL3
,
373 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
374 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
375 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
376 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
377 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
378 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
379 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
380 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
383 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
384 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
385 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
386 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
387 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
388 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
389 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
390 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
391 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
392 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
393 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
394 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
397 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
398 /* MIPS DSP Arithmetic */
399 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
400 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
401 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
402 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
403 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
404 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
405 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
406 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
407 /* MIPS DSP GPR-Based Shift Sub-class */
408 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
409 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
410 /* MIPS DSP Multiply Sub-class insns */
411 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
412 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
413 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
414 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
415 /* DSP Bit/Manipulation Sub-class */
416 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
417 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
418 /* MIPS DSP Append Sub-class */
419 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
420 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
421 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
422 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
423 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
426 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
427 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
428 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
429 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
430 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
431 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL sub-group of SPECIAL3: also match bits 10..6.  Expansion and
   argument fully parenthesized for safe use in any context. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
438 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
439 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
440 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
441 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
442 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
443 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL sub-group of SPECIAL3: also match bits 10..6.  Expansion and
   argument fully parenthesized for safe use in any context. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
450 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
451 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
452 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
453 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
454 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
457 /* MIPS DSP REGIMM opcodes */
459 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
460 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP indexed-load (LX) sub-group of SPECIAL3: also match bits 10..6.
   Argument parenthesized for safe expansion. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
466 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
467 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
468 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
469 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* ADDU.QB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
474 /* MIPS DSP Arithmetic Sub-class */
475 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
476 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
477 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
478 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
482 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
483 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
484 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
489 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
492 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
493 /* MIPS DSP Multiply Sub-class insns */
494 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
496 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
497 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* ADDUH.QB shares its SPECIAL3 function field (0x18) with the Loongson
   OPC_MULT_G_2E encoding, so it is defined as an alias of it. */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* ADDUH.QB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
505 /* MIPS DSP Arithmetic Sub-class */
506 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
507 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
508 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
509 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
521 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
522 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* ABSQ_S.PH sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
527 /* MIPS DSP Arithmetic Sub-class */
528 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
529 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
530 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
531 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
541 /* DSP Bit/Manipulation Sub-class */
542 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
544 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
545 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* CMPU.EQ.QB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
551 /* MIPS DSP Arithmetic Sub-class */
552 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
553 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
554 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
555 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 /* DSP Compare-Pick Sub-class */
560 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
562 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
563 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* SHLL.QB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
579 /* MIPS DSP GPR-Based Shift Sub-class */
580 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
581 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
582 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
583 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DPA.W.PH sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
606 /* MIPS DSP Multiply Sub-class insns */
607 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
608 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
609 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
610 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* INSV sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
633 /* DSP Bit/Manipulation Sub-class */
634 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* APPEND sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
639 /* MIPS DSP Append Sub-class */
640 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
641 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
642 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* EXTR.W sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
647 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
648 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
649 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
650 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
651 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
660 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
661 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
662 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
663 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
664 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* ABSQ_S.QH sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
669 /* MIPS DSP Arithmetic Sub-class */
670 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
671 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
672 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
673 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
687 /* DSP Bit/Manipulation Sub-class */
688 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
690 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
691 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* ADDU.OB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
698 /* MIPS DSP Multiply Sub-class insns */
699 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
700 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
701 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
702 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
704 /* MIPS DSP Arithmetic Sub-class */
705 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
707 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
708 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
716 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
717 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* CMPU.EQ.OB sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
730 /* DSP Compare-Pick Sub-class */
731 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
732 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
733 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
734 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
750 /* MIPS DSP Arithmetic Sub-class */
751 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
753 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
754 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DAPPEND sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
763 /* DSP Append Sub-class */
764 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
765 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
766 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
767 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DEXTR.W sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
772 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
773 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
774 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
775 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
776 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* DINSV sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
798 /* DSP Bit/Manipulation Sub-class */
799 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* DPAQ.W.QH sub-group of SPECIAL3: also match bits 10..6.  Argument
   parenthesized for safe expansion. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
804 /* MIPS DSP Multiply Sub-class insns */
805 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
806 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
807 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
808 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
833 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
835 /* MIPS DSP GPR-Based Shift Sub-class */
836 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
837 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
838 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
839 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
864 /* Coprocessor 0 (rs field) */
865 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
868 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
869 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
870 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
871 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
872 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
873 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
874 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
875 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
876 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
877 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
878 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
879 OPC_C0
= (0x10 << 21) | OPC_CP0
,
880 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
881 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
885 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
888 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
889 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
890 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
891 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
892 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
893 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
896 /* Coprocessor 0 (with rs == C0) */
897 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
900 OPC_TLBR
= 0x01 | OPC_C0
,
901 OPC_TLBWI
= 0x02 | OPC_C0
,
902 OPC_TLBINV
= 0x03 | OPC_C0
,
903 OPC_TLBINVF
= 0x04 | OPC_C0
,
904 OPC_TLBWR
= 0x06 | OPC_C0
,
905 OPC_TLBP
= 0x08 | OPC_C0
,
906 OPC_RFE
= 0x10 | OPC_C0
,
907 OPC_ERET
= 0x18 | OPC_C0
,
908 OPC_DERET
= 0x1F | OPC_C0
,
909 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
929 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
930 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
931 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
932 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
933 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
934 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
935 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
936 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
937 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
938 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
939 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
940 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
941 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
942 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
943 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
944 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
945 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
946 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
947 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
948 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
949 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
950 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
951 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
952 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
953 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
954 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
955 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
956 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
957 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
958 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
961 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
962 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
965 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
966 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
967 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
968 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
972 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
973 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
977 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
978 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
981 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
984 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
985 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
986 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
987 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
988 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
989 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
990 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
991 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
992 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
993 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
994 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
997 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1000 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1001 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1002 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1003 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1004 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1005 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1010 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1011 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1012 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1013 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1014 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1019 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1020 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1021 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1022 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1023 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1024 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1027 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1028 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1029 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1030 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1031 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1032 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1037 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1038 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1039 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1040 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1041 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1044 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1045 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1046 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1047 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1048 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1051 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1052 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1053 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1054 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1055 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1058 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1059 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1060 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1061 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1062 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1065 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1066 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1067 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1068 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1069 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1072 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1073 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1074 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1075 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1076 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1079 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1080 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1081 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1082 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1083 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1086 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1087 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1088 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1089 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1090 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1094 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1097 OPC_LWXC1
= 0x00 | OPC_CP3
,
1098 OPC_LDXC1
= 0x01 | OPC_CP3
,
1099 OPC_LUXC1
= 0x05 | OPC_CP3
,
1100 OPC_SWXC1
= 0x08 | OPC_CP3
,
1101 OPC_SDXC1
= 0x09 | OPC_CP3
,
1102 OPC_SUXC1
= 0x0D | OPC_CP3
,
1103 OPC_PREFX
= 0x0F | OPC_CP3
,
1104 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1105 OPC_MADD_S
= 0x20 | OPC_CP3
,
1106 OPC_MADD_D
= 0x21 | OPC_CP3
,
1107 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1108 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1109 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1110 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1111 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1112 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1113 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1114 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1115 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1116 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1120 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1122 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1123 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1124 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1125 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1126 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1127 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1128 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1129 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1130 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1131 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1132 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1133 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1134 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1135 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1136 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1137 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1138 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1139 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1140 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1141 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1142 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1144 /* MI10 instruction */
1145 OPC_LD_B
= (0x20) | OPC_MSA
,
1146 OPC_LD_H
= (0x21) | OPC_MSA
,
1147 OPC_LD_W
= (0x22) | OPC_MSA
,
1148 OPC_LD_D
= (0x23) | OPC_MSA
,
1149 OPC_ST_B
= (0x24) | OPC_MSA
,
1150 OPC_ST_H
= (0x25) | OPC_MSA
,
1151 OPC_ST_W
= (0x26) | OPC_MSA
,
1152 OPC_ST_D
= (0x27) | OPC_MSA
,
1156 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1157 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1158 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1159 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1160 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1161 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1162 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1163 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1164 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1165 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1166 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1167 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1168 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1170 /* I8 instruction */
1171 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1172 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1173 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1174 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1175 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1176 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1177 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1178 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1179 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1180 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1182 /* VEC/2R/2RF instruction */
1183 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1184 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1185 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1186 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1187 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1188 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1189 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1191 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1192 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1194 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1195 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1196 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1197 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1198 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1200 /* 2RF instruction df(bit 16) = _w, _d */
1201 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1202 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1203 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1204 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1205 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1206 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1207 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1208 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1209 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1210 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1211 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1212 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1213 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1214 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1215 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1216 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1218 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1219 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1220 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1221 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1222 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1223 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1224 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1225 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1226 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1227 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1228 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1229 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1230 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1231 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1232 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1233 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1234 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1235 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1236 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1237 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1238 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1239 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1240 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1241 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1242 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1243 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1244 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1245 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1246 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1247 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1248 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1249 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1250 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1251 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1252 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1253 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1254 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1255 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1256 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1257 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1258 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1259 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1260 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1261 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1262 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1263 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1264 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1265 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1266 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1267 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1268 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1269 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1270 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1271 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1272 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1273 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1274 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1275 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1276 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1278 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1279 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1280 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1281 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1283 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1284 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1285 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1286 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1287 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1288 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1289 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1290 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1291 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 /* 3RF instruction _df(bit 21) = _w, _d */
1295 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1296 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1297 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1298 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1299 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1300 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1303 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1304 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1305 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1306 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1307 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1308 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1309 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1310 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1311 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1312 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1313 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1316 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1317 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1318 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1319 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1320 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1321 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1322 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1323 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1327 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1330 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1331 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1332 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1333 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1334 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1335 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1337 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1338 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1339 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1340 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1341 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1342 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1343 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1344 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1345 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1346 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1347 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1348 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1349 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1352 /* global register indices */
1353 static TCGv_ptr cpu_env
;
1354 static TCGv cpu_gpr
[32], cpu_PC
;
1355 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1356 static TCGv cpu_dspctrl
, btarget
, bcond
;
1357 static TCGv_i32 hflags
;
1358 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1359 static TCGv_i64 fpu_f64
[32];
1360 static TCGv_i64 msa_wr_d
[64];
1362 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
1363 static target_ulong gen_opc_btarget
[OPC_BUF_SIZE
];
1365 #include "exec/gen-icount.h"
/* Helper-call wrappers: box an immediate into a transient TCGv_i32,
 * invoke the named helper, then free the temporary.  The dump dropped
 * the "} while(0)" closers; restored here. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
/*
 * NOTE(review): this region was damaged by the text extraction: original
 * file line numbers are fused into the tokens, and several struct members
 * (original lines 1412, 1414, 1417, 1420-1429, 1431-1434) were dropped,
 * as were the struct/enum closers.  Tokens below are kept byte-identical;
 * reconcile against the pristine file before editing.
 */
/* Per-translation-block decoder state for the MIPS front end. */
1409 typedef struct DisasContext
{
1410 struct TranslationBlock
*tb
;
1411 target_ulong pc
, saved_pc
;
1413 int singlestep_enabled
;
1415 int32_t CP0_Config1
;
1416 /* Routine used to access memory */
1418 TCGMemOp default_tcg_memop_mask
;
1419 uint32_t hflags
, saved_hflags
;
1421 target_ulong btarget
;
1430 int CP0_LLAddr_shift
;
/* Translator-loop exit status values (the enclosing enum wrapper was
 * lost in the dump). */
1435 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
1436 * exception condition */
1437 BS_STOP
= 1, /* We want to stop translation for any reason */
1438 BS_BRANCH
= 2, /* We reached a branch condition */
1439 BS_EXCP
= 3, /* We reached an exception condition */
/* ABI names of the 32 general-purpose registers. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulator registers (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulator registers (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the 64 MSA doubleword halves (two per 128-bit w register). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Disassembly-time trace logging, compiled out unless MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)
/* Log an invalid-opcode diagnostic, compiled out unless MIPS_DEBUG_DISAS. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1500 /* General purpose registers moves. */
1501 static inline void gen_load_gpr (TCGv t
, int reg
)
1504 tcg_gen_movi_tl(t
, 0);
1506 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1509 static inline void gen_store_gpr (TCGv t
, int reg
)
1512 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1515 /* Moves to/from shadow registers. */
1516 static inline void gen_load_srsgpr (int from
, int to
)
1518 TCGv t0
= tcg_temp_new();
1521 tcg_gen_movi_tl(t0
, 0);
1523 TCGv_i32 t2
= tcg_temp_new_i32();
1524 TCGv_ptr addr
= tcg_temp_new_ptr();
1526 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1527 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1528 tcg_gen_andi_i32(t2
, t2
, 0xf);
1529 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1530 tcg_gen_ext_i32_ptr(addr
, t2
);
1531 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1533 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1534 tcg_temp_free_ptr(addr
);
1535 tcg_temp_free_i32(t2
);
1537 gen_store_gpr(t0
, to
);
1541 static inline void gen_store_srsgpr (int from
, int to
)
1544 TCGv t0
= tcg_temp_new();
1545 TCGv_i32 t2
= tcg_temp_new_i32();
1546 TCGv_ptr addr
= tcg_temp_new_ptr();
1548 gen_load_gpr(t0
, from
);
1549 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1550 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1551 tcg_gen_andi_i32(t2
, t2
, 0xf);
1552 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1553 tcg_gen_ext_i32_ptr(addr
, t2
);
1554 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1556 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1557 tcg_temp_free_ptr(addr
);
1558 tcg_temp_free_i32(t2
);
1564 static inline void gen_save_pc(target_ulong pc
)
1566 tcg_gen_movi_tl(cpu_PC
, pc
);
1569 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1571 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1572 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1573 gen_save_pc(ctx
->pc
);
1574 ctx
->saved_pc
= ctx
->pc
;
1576 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1577 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1578 ctx
->saved_hflags
= ctx
->hflags
;
1579 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1585 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1591 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1593 ctx
->saved_hflags
= ctx
->hflags
;
1594 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1600 ctx
->btarget
= env
->btarget
;
1605 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1607 TCGv_i32 texcp
= tcg_const_i32(excp
);
1608 TCGv_i32 terr
= tcg_const_i32(err
);
1609 save_cpu_state(ctx
, 1);
1610 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1611 tcg_temp_free_i32(terr
);
1612 tcg_temp_free_i32(texcp
);
1615 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1617 save_cpu_state(ctx
, 1);
1618 gen_helper_0e0i(raise_exception
, excp
);
1621 /* Floating point register moves. */
1622 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1624 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1625 generate_exception(ctx
, EXCP_RI
);
1627 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1630 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1633 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1634 generate_exception(ctx
, EXCP_RI
);
1636 t64
= tcg_temp_new_i64();
1637 tcg_gen_extu_i32_i64(t64
, t
);
1638 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1639 tcg_temp_free_i64(t64
);
1642 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1644 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1645 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1647 gen_load_fpr32(ctx
, t
, reg
| 1);
1651 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1653 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1654 TCGv_i64 t64
= tcg_temp_new_i64();
1655 tcg_gen_extu_i32_i64(t64
, t
);
1656 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1657 tcg_temp_free_i64(t64
);
1659 gen_store_fpr32(ctx
, t
, reg
| 1);
1663 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1665 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1666 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1668 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1672 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1674 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1675 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1678 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1679 t0
= tcg_temp_new_i64();
1680 tcg_gen_shri_i64(t0
, t
, 32);
1681 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1682 tcg_temp_free_i64(t0
);
/* Map FP condition-code index cc to its bit position in FCSR:
 * cc 0 is bit 23, cc 1..7 are bits 25..31 (bit 24 is skipped).
 * (The function body was dropped by the dump; restored.) */
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
1694 /* Addresses computation */
1695 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1697 tcg_gen_add_tl(ret
, arg0
, arg1
);
1699 #if defined(TARGET_MIPS64)
1700 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1701 tcg_gen_ext32s_i64(ret
, ret
);
1706 /* Addresses computation (translation time) */
1707 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1710 target_long sum
= base
+ offset
;
1712 #if defined(TARGET_MIPS64)
1713 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1720 /* Sign-extract the low 32-bits to a target_long. */
1721 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1723 #if defined(TARGET_MIPS64)
1724 tcg_gen_ext32s_i64(ret
, arg
);
1726 tcg_gen_extrl_i64_i32(ret
, arg
);
1730 /* Sign-extract the high 32-bits to a target_long. */
1731 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1733 #if defined(TARGET_MIPS64)
1734 tcg_gen_sari_i64(ret
, arg
, 32);
1736 tcg_gen_extrh_i64_i32(ret
, arg
);
1740 static inline void check_cp0_enabled(DisasContext
*ctx
)
1742 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1743 generate_exception_err(ctx
, EXCP_CpU
, 0);
1746 static inline void check_cp1_enabled(DisasContext
*ctx
)
1748 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1749 generate_exception_err(ctx
, EXCP_CpU
, 1);
1752 /* Verify that the processor is running with COP1X instructions enabled.
1753 This is associated with the nabla symbol in the MIPS32 and MIPS64
1756 static inline void check_cop1x(DisasContext
*ctx
)
1758 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1759 generate_exception(ctx
, EXCP_RI
);
1762 /* Verify that the processor is running with 64-bit floating-point
1763 operations enabled. */
1765 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1767 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1768 generate_exception(ctx
, EXCP_RI
);
1772 * Verify if floating point register is valid; an operation is not defined
1773 * if bit 0 of any register specification is set and the FR bit in the
1774 * Status register equals zero, since the register numbers specify an
1775 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1776 * in the Status register equals one, both even and odd register numbers
1777 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1779 * Multiple 64 bit wide registers can be checked by calling
1780 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1782 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1784 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1785 generate_exception(ctx
, EXCP_RI
);
1788 /* Verify that the processor is running with DSP instructions enabled.
1789 This is enabled by CP0 Status register MX(24) bit.
1792 static inline void check_dsp(DisasContext
*ctx
)
1794 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1795 if (ctx
->insn_flags
& ASE_DSP
) {
1796 generate_exception(ctx
, EXCP_DSPDIS
);
1798 generate_exception(ctx
, EXCP_RI
);
1803 static inline void check_dspr2(DisasContext
*ctx
)
1805 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1806 if (ctx
->insn_flags
& ASE_DSP
) {
1807 generate_exception(ctx
, EXCP_DSPDIS
);
1809 generate_exception(ctx
, EXCP_RI
);
1814 /* This code generates a "reserved instruction" exception if the
1815 CPU does not support the instruction set corresponding to flags. */
1816 static inline void check_insn(DisasContext
*ctx
, int flags
)
1818 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1819 generate_exception(ctx
, EXCP_RI
);
1823 /* This code generates a "reserved instruction" exception if the
1824 CPU has corresponding flag set which indicates that the instruction
1825 has been removed. */
1826 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1828 if (unlikely(ctx
->insn_flags
& flags
)) {
1829 generate_exception(ctx
, EXCP_RI
);
1833 /* This code generates a "reserved instruction" exception if the
1834 CPU does not support 64-bit paired-single (PS) floating point data type */
1835 static inline void check_ps(DisasContext
*ctx
)
1837 if (unlikely(!ctx
->ps
)) {
1838 generate_exception(ctx
, EXCP_RI
);
1840 check_cp1_64bitmode(ctx
);
1843 #ifdef TARGET_MIPS64
1844 /* This code generates a "reserved instruction" exception if 64-bit
1845 instructions are not enabled. */
1846 static inline void check_mips_64(DisasContext
*ctx
)
1848 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1849 generate_exception(ctx
, EXCP_RI
);
1853 #ifndef CONFIG_USER_ONLY
1854 static inline void check_mvh(DisasContext
*ctx
)
1856 if (unlikely(!ctx
->mvh
)) {
1857 generate_exception(ctx
, EXCP_RI
);
1862 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1863 calling interface for 32 and 64-bit FPRs. No sense in changing
1864 all callers for gen_load_fpr32 when we need the CTX parameter for
1866 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1867 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1868 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1869 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1870 int ft, int fs, int cc) \
1872 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1873 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1882 check_cp1_registers(ctx, fs | ft); \
1890 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1891 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1893 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1894 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1895 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1896 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1897 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1898 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1899 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1900 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1901 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1902 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1903 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1904 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1905 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1906 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1907 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1908 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1911 tcg_temp_free_i##bits (fp0); \
1912 tcg_temp_free_i##bits (fp1); \
1915 FOP_CONDS(, 0, d
, FMT_D
, 64)
1916 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1917 FOP_CONDS(, 0, s
, FMT_S
, 32)
1918 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1919 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1920 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1923 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1924 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1925 int ft, int fs, int fd) \
1927 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1928 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1929 if (ifmt == FMT_D) { \
1930 check_cp1_registers(ctx, fs | ft | fd); \
1932 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1933 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1936 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1939 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1942 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1945 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1948 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1951 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1954 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1957 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1960 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1963 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1966 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1969 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1972 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1975 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1978 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1981 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1984 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1987 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1990 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1993 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
1996 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
1999 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2005 tcg_temp_free_i ## bits (fp0); \
2006 tcg_temp_free_i ## bits (fp1); \
2009 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2010 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2012 #undef gen_ldcmp_fpr32
2013 #undef gen_ldcmp_fpr64
2015 /* load/store instructions. */
2016 #ifdef CONFIG_USER_ONLY
2017 #define OP_LD_ATOMIC(insn,fname) \
2018 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2020 TCGv t0 = tcg_temp_new(); \
2021 tcg_gen_mov_tl(t0, arg1); \
2022 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2023 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2024 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2025 tcg_temp_free(t0); \
2028 #define OP_LD_ATOMIC(insn,fname) \
2029 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2031 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2034 OP_LD_ATOMIC(ll
,ld32s
);
2035 #if defined(TARGET_MIPS64)
2036 OP_LD_ATOMIC(lld
,ld64
);
2040 #ifdef CONFIG_USER_ONLY
2041 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2042 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2044 TCGv t0 = tcg_temp_new(); \
2045 TCGLabel *l1 = gen_new_label(); \
2046 TCGLabel *l2 = gen_new_label(); \
2048 tcg_gen_andi_tl(t0, arg2, almask); \
2049 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2050 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2051 generate_exception(ctx, EXCP_AdES); \
2052 gen_set_label(l1); \
2053 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2054 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2055 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2056 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2057 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2058 gen_helper_0e0i(raise_exception, EXCP_SC); \
2059 gen_set_label(l2); \
2060 tcg_gen_movi_tl(t0, 0); \
2061 gen_store_gpr(t0, rt); \
2062 tcg_temp_free(t0); \
2065 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2066 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2068 TCGv t0 = tcg_temp_new(); \
2069 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2070 gen_store_gpr(t0, rt); \
2071 tcg_temp_free(t0); \
2074 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2075 #if defined(TARGET_MIPS64)
2076 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2080 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2081 int base
, int16_t offset
)
2084 tcg_gen_movi_tl(addr
, offset
);
2085 } else if (offset
== 0) {
2086 gen_load_gpr(addr
, base
);
2088 tcg_gen_movi_tl(addr
, offset
);
2089 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2093 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2095 target_ulong pc
= ctx
->pc
;
2097 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2098 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2103 pc
&= ~(target_ulong
)3;
2108 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2109 int rt
, int base
, int16_t offset
)
2113 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2114 /* Loongson CPU uses a load to zero register for prefetch.
2115 We emulate it as a NOP. On other CPU we must perform the
2116 actual memory access. */
2120 t0
= tcg_temp_new();
2121 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2124 #if defined(TARGET_MIPS64)
2126 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2127 ctx
->default_tcg_memop_mask
);
2128 gen_store_gpr(t0
, rt
);
2131 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2132 ctx
->default_tcg_memop_mask
);
2133 gen_store_gpr(t0
, rt
);
2137 save_cpu_state(ctx
, 1);
2138 op_ld_lld(t0
, t0
, ctx
);
2139 gen_store_gpr(t0
, rt
);
2142 t1
= tcg_temp_new();
2143 /* Do a byte access to possibly trigger a page
2144 fault with the unaligned address. */
2145 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2146 tcg_gen_andi_tl(t1
, t0
, 7);
2147 #ifndef TARGET_WORDS_BIGENDIAN
2148 tcg_gen_xori_tl(t1
, t1
, 7);
2150 tcg_gen_shli_tl(t1
, t1
, 3);
2151 tcg_gen_andi_tl(t0
, t0
, ~7);
2152 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2153 tcg_gen_shl_tl(t0
, t0
, t1
);
2154 t2
= tcg_const_tl(-1);
2155 tcg_gen_shl_tl(t2
, t2
, t1
);
2156 gen_load_gpr(t1
, rt
);
2157 tcg_gen_andc_tl(t1
, t1
, t2
);
2159 tcg_gen_or_tl(t0
, t0
, t1
);
2161 gen_store_gpr(t0
, rt
);
2164 t1
= tcg_temp_new();
2165 /* Do a byte access to possibly trigger a page
2166 fault with the unaligned address. */
2167 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2168 tcg_gen_andi_tl(t1
, t0
, 7);
2169 #ifdef TARGET_WORDS_BIGENDIAN
2170 tcg_gen_xori_tl(t1
, t1
, 7);
2172 tcg_gen_shli_tl(t1
, t1
, 3);
2173 tcg_gen_andi_tl(t0
, t0
, ~7);
2174 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2175 tcg_gen_shr_tl(t0
, t0
, t1
);
2176 tcg_gen_xori_tl(t1
, t1
, 63);
2177 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2178 tcg_gen_shl_tl(t2
, t2
, t1
);
2179 gen_load_gpr(t1
, rt
);
2180 tcg_gen_and_tl(t1
, t1
, t2
);
2182 tcg_gen_or_tl(t0
, t0
, t1
);
2184 gen_store_gpr(t0
, rt
);
2187 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2188 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2190 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2191 gen_store_gpr(t0
, rt
);
2195 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2196 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2198 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2199 gen_store_gpr(t0
, rt
);
2202 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2203 ctx
->default_tcg_memop_mask
);
2204 gen_store_gpr(t0
, rt
);
2207 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2208 ctx
->default_tcg_memop_mask
);
2209 gen_store_gpr(t0
, rt
);
2212 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2213 ctx
->default_tcg_memop_mask
);
2214 gen_store_gpr(t0
, rt
);
2217 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2218 gen_store_gpr(t0
, rt
);
2221 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2222 gen_store_gpr(t0
, rt
);
2225 t1
= tcg_temp_new();
2226 /* Do a byte access to possibly trigger a page
2227 fault with the unaligned address. */
2228 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2229 tcg_gen_andi_tl(t1
, t0
, 3);
2230 #ifndef TARGET_WORDS_BIGENDIAN
2231 tcg_gen_xori_tl(t1
, t1
, 3);
2233 tcg_gen_shli_tl(t1
, t1
, 3);
2234 tcg_gen_andi_tl(t0
, t0
, ~3);
2235 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2236 tcg_gen_shl_tl(t0
, t0
, t1
);
2237 t2
= tcg_const_tl(-1);
2238 tcg_gen_shl_tl(t2
, t2
, t1
);
2239 gen_load_gpr(t1
, rt
);
2240 tcg_gen_andc_tl(t1
, t1
, t2
);
2242 tcg_gen_or_tl(t0
, t0
, t1
);
2244 tcg_gen_ext32s_tl(t0
, t0
);
2245 gen_store_gpr(t0
, rt
);
2248 t1
= tcg_temp_new();
2249 /* Do a byte access to possibly trigger a page
2250 fault with the unaligned address. */
2251 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2252 tcg_gen_andi_tl(t1
, t0
, 3);
2253 #ifdef TARGET_WORDS_BIGENDIAN
2254 tcg_gen_xori_tl(t1
, t1
, 3);
2256 tcg_gen_shli_tl(t1
, t1
, 3);
2257 tcg_gen_andi_tl(t0
, t0
, ~3);
2258 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2259 tcg_gen_shr_tl(t0
, t0
, t1
);
2260 tcg_gen_xori_tl(t1
, t1
, 31);
2261 t2
= tcg_const_tl(0xfffffffeull
);
2262 tcg_gen_shl_tl(t2
, t2
, t1
);
2263 gen_load_gpr(t1
, rt
);
2264 tcg_gen_and_tl(t1
, t1
, t2
);
2266 tcg_gen_or_tl(t0
, t0
, t1
);
2268 tcg_gen_ext32s_tl(t0
, t0
);
2269 gen_store_gpr(t0
, rt
);
2273 save_cpu_state(ctx
, 1);
2274 op_ld_ll(t0
, t0
, ctx
);
2275 gen_store_gpr(t0
, rt
);
2282 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2283 int base
, int16_t offset
)
2285 TCGv t0
= tcg_temp_new();
2286 TCGv t1
= tcg_temp_new();
2288 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2289 gen_load_gpr(t1
, rt
);
2291 #if defined(TARGET_MIPS64)
2293 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2294 ctx
->default_tcg_memop_mask
);
2297 save_cpu_state(ctx
, 1);
2298 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2301 save_cpu_state(ctx
, 1);
2302 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2306 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2307 ctx
->default_tcg_memop_mask
);
2310 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2311 ctx
->default_tcg_memop_mask
);
2314 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2317 save_cpu_state(ctx
, 1);
2318 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2321 save_cpu_state(ctx
, 1);
2322 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2330 /* Store conditional */
2331 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2332 int base
, int16_t offset
)
2336 #ifdef CONFIG_USER_ONLY
2337 t0
= tcg_temp_local_new();
2338 t1
= tcg_temp_local_new();
2340 t0
= tcg_temp_new();
2341 t1
= tcg_temp_new();
2343 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2344 gen_load_gpr(t1
, rt
);
2346 #if defined(TARGET_MIPS64)
2349 save_cpu_state(ctx
, 1);
2350 op_st_scd(t1
, t0
, rt
, ctx
);
2355 save_cpu_state(ctx
, 1);
2356 op_st_sc(t1
, t0
, rt
, ctx
);
2363 /* Load and store */
2364 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2365 int base
, int16_t offset
)
2367 TCGv t0
= tcg_temp_new();
2369 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2370 /* Don't do NOP if destination is zero: we must perform the actual
2375 TCGv_i32 fp0
= tcg_temp_new_i32();
2376 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2377 ctx
->default_tcg_memop_mask
);
2378 gen_store_fpr32(ctx
, fp0
, ft
);
2379 tcg_temp_free_i32(fp0
);
2384 TCGv_i32 fp0
= tcg_temp_new_i32();
2385 gen_load_fpr32(ctx
, fp0
, ft
);
2386 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2387 ctx
->default_tcg_memop_mask
);
2388 tcg_temp_free_i32(fp0
);
2393 TCGv_i64 fp0
= tcg_temp_new_i64();
2394 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2395 ctx
->default_tcg_memop_mask
);
2396 gen_store_fpr64(ctx
, fp0
, ft
);
2397 tcg_temp_free_i64(fp0
);
2402 TCGv_i64 fp0
= tcg_temp_new_i64();
2403 gen_load_fpr64(ctx
, fp0
, ft
);
2404 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2405 ctx
->default_tcg_memop_mask
);
2406 tcg_temp_free_i64(fp0
);
2410 MIPS_INVAL("flt_ldst");
2411 generate_exception(ctx
, EXCP_RI
);
2418 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2419 int rs
, int16_t imm
)
2421 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2422 check_cp1_enabled(ctx
);
2426 check_insn(ctx
, ISA_MIPS2
);
2429 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2432 generate_exception_err(ctx
, EXCP_CpU
, 1);
2436 /* Arithmetic with immediate operand */
2437 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2438 int rt
, int rs
, int16_t imm
)
2440 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2442 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2443 /* If no destination, treat it as a NOP.
2444 For addi, we must generate the overflow exception when needed. */
2450 TCGv t0
= tcg_temp_local_new();
2451 TCGv t1
= tcg_temp_new();
2452 TCGv t2
= tcg_temp_new();
2453 TCGLabel
*l1
= gen_new_label();
2455 gen_load_gpr(t1
, rs
);
2456 tcg_gen_addi_tl(t0
, t1
, uimm
);
2457 tcg_gen_ext32s_tl(t0
, t0
);
2459 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2460 tcg_gen_xori_tl(t2
, t0
, uimm
);
2461 tcg_gen_and_tl(t1
, t1
, t2
);
2463 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2465 /* operands of same sign, result different sign */
2466 generate_exception(ctx
, EXCP_OVERFLOW
);
2468 tcg_gen_ext32s_tl(t0
, t0
);
2469 gen_store_gpr(t0
, rt
);
2475 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2476 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2478 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2481 #if defined(TARGET_MIPS64)
2484 TCGv t0
= tcg_temp_local_new();
2485 TCGv t1
= tcg_temp_new();
2486 TCGv t2
= tcg_temp_new();
2487 TCGLabel
*l1
= gen_new_label();
2489 gen_load_gpr(t1
, rs
);
2490 tcg_gen_addi_tl(t0
, t1
, uimm
);
2492 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2493 tcg_gen_xori_tl(t2
, t0
, uimm
);
2494 tcg_gen_and_tl(t1
, t1
, t2
);
2496 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2498 /* operands of same sign, result different sign */
2499 generate_exception(ctx
, EXCP_OVERFLOW
);
2501 gen_store_gpr(t0
, rt
);
2507 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2509 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2516 /* Logic with immediate operand */
2517 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2518 int rt
, int rs
, int16_t imm
)
2523 /* If no destination, treat it as a NOP. */
2526 uimm
= (uint16_t)imm
;
2529 if (likely(rs
!= 0))
2530 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2532 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2536 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2541 if (likely(rs
!= 0))
2542 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2544 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2547 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2549 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2550 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2552 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2561 /* Set on less than with immediate operand */
2562 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2563 int rt
, int rs
, int16_t imm
)
2565 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2569 /* If no destination, treat it as a NOP. */
2572 t0
= tcg_temp_new();
2573 gen_load_gpr(t0
, rs
);
2576 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2579 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2585 /* Shifts with immediate operand */
2586 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2587 int rt
, int rs
, int16_t imm
)
2589 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2593 /* If no destination, treat it as a NOP. */
2597 t0
= tcg_temp_new();
2598 gen_load_gpr(t0
, rs
);
2601 tcg_gen_shli_tl(t0
, t0
, uimm
);
2602 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2605 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2609 tcg_gen_ext32u_tl(t0
, t0
);
2610 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2612 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2617 TCGv_i32 t1
= tcg_temp_new_i32();
2619 tcg_gen_trunc_tl_i32(t1
, t0
);
2620 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2621 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2622 tcg_temp_free_i32(t1
);
2624 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2627 #if defined(TARGET_MIPS64)
2629 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2632 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2639 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2645 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2648 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2651 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2654 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2662 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2663 int rd
, int rs
, int rt
)
2665 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2666 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2667 /* If no destination, treat it as a NOP.
2668 For add & sub, we must generate the overflow exception when needed. */
2675 TCGv t0
= tcg_temp_local_new();
2676 TCGv t1
= tcg_temp_new();
2677 TCGv t2
= tcg_temp_new();
2678 TCGLabel
*l1
= gen_new_label();
2680 gen_load_gpr(t1
, rs
);
2681 gen_load_gpr(t2
, rt
);
2682 tcg_gen_add_tl(t0
, t1
, t2
);
2683 tcg_gen_ext32s_tl(t0
, t0
);
2684 tcg_gen_xor_tl(t1
, t1
, t2
);
2685 tcg_gen_xor_tl(t2
, t0
, t2
);
2686 tcg_gen_andc_tl(t1
, t2
, t1
);
2688 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2690 /* operands of same sign, result different sign */
2691 generate_exception(ctx
, EXCP_OVERFLOW
);
2693 gen_store_gpr(t0
, rd
);
2698 if (rs
!= 0 && rt
!= 0) {
2699 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2700 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2701 } else if (rs
== 0 && rt
!= 0) {
2702 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2703 } else if (rs
!= 0 && rt
== 0) {
2704 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2706 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2711 TCGv t0
= tcg_temp_local_new();
2712 TCGv t1
= tcg_temp_new();
2713 TCGv t2
= tcg_temp_new();
2714 TCGLabel
*l1
= gen_new_label();
2716 gen_load_gpr(t1
, rs
);
2717 gen_load_gpr(t2
, rt
);
2718 tcg_gen_sub_tl(t0
, t1
, t2
);
2719 tcg_gen_ext32s_tl(t0
, t0
);
2720 tcg_gen_xor_tl(t2
, t1
, t2
);
2721 tcg_gen_xor_tl(t1
, t0
, t1
);
2722 tcg_gen_and_tl(t1
, t1
, t2
);
2724 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2726 /* operands of different sign, first operand and result different sign */
2727 generate_exception(ctx
, EXCP_OVERFLOW
);
2729 gen_store_gpr(t0
, rd
);
2734 if (rs
!= 0 && rt
!= 0) {
2735 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2736 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2737 } else if (rs
== 0 && rt
!= 0) {
2738 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2739 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2740 } else if (rs
!= 0 && rt
== 0) {
2741 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2743 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2746 #if defined(TARGET_MIPS64)
2749 TCGv t0
= tcg_temp_local_new();
2750 TCGv t1
= tcg_temp_new();
2751 TCGv t2
= tcg_temp_new();
2752 TCGLabel
*l1
= gen_new_label();
2754 gen_load_gpr(t1
, rs
);
2755 gen_load_gpr(t2
, rt
);
2756 tcg_gen_add_tl(t0
, t1
, t2
);
2757 tcg_gen_xor_tl(t1
, t1
, t2
);
2758 tcg_gen_xor_tl(t2
, t0
, t2
);
2759 tcg_gen_andc_tl(t1
, t2
, t1
);
2761 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2763 /* operands of same sign, result different sign */
2764 generate_exception(ctx
, EXCP_OVERFLOW
);
2766 gen_store_gpr(t0
, rd
);
2771 if (rs
!= 0 && rt
!= 0) {
2772 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2773 } else if (rs
== 0 && rt
!= 0) {
2774 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2775 } else if (rs
!= 0 && rt
== 0) {
2776 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2778 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2783 TCGv t0
= tcg_temp_local_new();
2784 TCGv t1
= tcg_temp_new();
2785 TCGv t2
= tcg_temp_new();
2786 TCGLabel
*l1
= gen_new_label();
2788 gen_load_gpr(t1
, rs
);
2789 gen_load_gpr(t2
, rt
);
2790 tcg_gen_sub_tl(t0
, t1
, t2
);
2791 tcg_gen_xor_tl(t2
, t1
, t2
);
2792 tcg_gen_xor_tl(t1
, t0
, t1
);
2793 tcg_gen_and_tl(t1
, t1
, t2
);
2795 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2797 /* operands of different sign, first operand and result different sign */
2798 generate_exception(ctx
, EXCP_OVERFLOW
);
2800 gen_store_gpr(t0
, rd
);
2805 if (rs
!= 0 && rt
!= 0) {
2806 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2807 } else if (rs
== 0 && rt
!= 0) {
2808 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2809 } else if (rs
!= 0 && rt
== 0) {
2810 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2812 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2817 if (likely(rs
!= 0 && rt
!= 0)) {
2818 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2819 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2827 /* Conditional move */
2828 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2829 int rd
, int rs
, int rt
)
2834 /* If no destination, treat it as a NOP. */
2838 t0
= tcg_temp_new();
2839 gen_load_gpr(t0
, rt
);
2840 t1
= tcg_const_tl(0);
2841 t2
= tcg_temp_new();
2842 gen_load_gpr(t2
, rs
);
2845 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2848 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2851 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2854 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2863 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2864 int rd
, int rs
, int rt
)
2867 /* If no destination, treat it as a NOP. */
2873 if (likely(rs
!= 0 && rt
!= 0)) {
2874 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2876 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2880 if (rs
!= 0 && rt
!= 0) {
2881 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 } else if (rs
== 0 && rt
!= 0) {
2883 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2884 } else if (rs
!= 0 && rt
== 0) {
2885 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2887 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2891 if (likely(rs
!= 0 && rt
!= 0)) {
2892 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2893 } else if (rs
== 0 && rt
!= 0) {
2894 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2895 } else if (rs
!= 0 && rt
== 0) {
2896 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2898 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2902 if (likely(rs
!= 0 && rt
!= 0)) {
2903 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2904 } else if (rs
== 0 && rt
!= 0) {
2905 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2906 } else if (rs
!= 0 && rt
== 0) {
2907 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2909 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2915 /* Set on lower than */
2916 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2917 int rd
, int rs
, int rt
)
2922 /* If no destination, treat it as a NOP. */
2926 t0
= tcg_temp_new();
2927 t1
= tcg_temp_new();
2928 gen_load_gpr(t0
, rs
);
2929 gen_load_gpr(t1
, rt
);
2932 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2935 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2943 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2944 int rd
, int rs
, int rt
)
2949 /* If no destination, treat it as a NOP.
2950 For add & sub, we must generate the overflow exception when needed. */
2954 t0
= tcg_temp_new();
2955 t1
= tcg_temp_new();
2956 gen_load_gpr(t0
, rs
);
2957 gen_load_gpr(t1
, rt
);
2960 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2961 tcg_gen_shl_tl(t0
, t1
, t0
);
2962 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2966 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2969 tcg_gen_ext32u_tl(t1
, t1
);
2970 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2971 tcg_gen_shr_tl(t0
, t1
, t0
);
2972 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2976 TCGv_i32 t2
= tcg_temp_new_i32();
2977 TCGv_i32 t3
= tcg_temp_new_i32();
2979 tcg_gen_trunc_tl_i32(t2
, t0
);
2980 tcg_gen_trunc_tl_i32(t3
, t1
);
2981 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2982 tcg_gen_rotr_i32(t2
, t3
, t2
);
2983 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2984 tcg_temp_free_i32(t2
);
2985 tcg_temp_free_i32(t3
);
2988 #if defined(TARGET_MIPS64)
2990 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2991 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2994 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2995 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2998 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2999 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3002 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3003 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3011 /* Arithmetic on HI/LO registers */
3012 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3014 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3025 #if defined(TARGET_MIPS64)
3027 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3031 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3035 #if defined(TARGET_MIPS64)
3037 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3041 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3046 #if defined(TARGET_MIPS64)
3048 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3052 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3055 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3060 #if defined(TARGET_MIPS64)
3062 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3066 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3069 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3075 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3078 TCGv t0
= tcg_const_tl(addr
);
3079 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3080 gen_store_gpr(t0
, reg
);
3084 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3090 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3093 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3094 addr
= addr_add(ctx
, pc
, offset
);
3095 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3099 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3100 addr
= addr_add(ctx
, pc
, offset
);
3101 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3103 #if defined(TARGET_MIPS64)
3106 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3107 addr
= addr_add(ctx
, pc
, offset
);
3108 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3112 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3115 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3116 addr
= addr_add(ctx
, pc
, offset
);
3117 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3122 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3123 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3124 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3127 #if defined(TARGET_MIPS64)
3128 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3129 case R6_OPC_LDPC
+ (1 << 16):
3130 case R6_OPC_LDPC
+ (2 << 16):
3131 case R6_OPC_LDPC
+ (3 << 16):
3133 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3134 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3135 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3139 MIPS_INVAL("OPC_PCREL");
3140 generate_exception(ctx
, EXCP_RI
);
3147 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3156 t0
= tcg_temp_new();
3157 t1
= tcg_temp_new();
3159 gen_load_gpr(t0
, rs
);
3160 gen_load_gpr(t1
, rt
);
3165 TCGv t2
= tcg_temp_new();
3166 TCGv t3
= tcg_temp_new();
3167 tcg_gen_ext32s_tl(t0
, t0
);
3168 tcg_gen_ext32s_tl(t1
, t1
);
3169 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3170 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3171 tcg_gen_and_tl(t2
, t2
, t3
);
3172 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3173 tcg_gen_or_tl(t2
, t2
, t3
);
3174 tcg_gen_movi_tl(t3
, 0);
3175 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3176 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3177 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3184 TCGv t2
= tcg_temp_new();
3185 TCGv t3
= tcg_temp_new();
3186 tcg_gen_ext32s_tl(t0
, t0
);
3187 tcg_gen_ext32s_tl(t1
, t1
);
3188 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3189 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3190 tcg_gen_and_tl(t2
, t2
, t3
);
3191 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3192 tcg_gen_or_tl(t2
, t2
, t3
);
3193 tcg_gen_movi_tl(t3
, 0);
3194 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3195 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3196 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3203 TCGv t2
= tcg_const_tl(0);
3204 TCGv t3
= tcg_const_tl(1);
3205 tcg_gen_ext32u_tl(t0
, t0
);
3206 tcg_gen_ext32u_tl(t1
, t1
);
3207 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3208 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3209 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3216 TCGv t2
= tcg_const_tl(0);
3217 TCGv t3
= tcg_const_tl(1);
3218 tcg_gen_ext32u_tl(t0
, t0
);
3219 tcg_gen_ext32u_tl(t1
, t1
);
3220 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3221 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3222 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3229 TCGv_i32 t2
= tcg_temp_new_i32();
3230 TCGv_i32 t3
= tcg_temp_new_i32();
3231 tcg_gen_trunc_tl_i32(t2
, t0
);
3232 tcg_gen_trunc_tl_i32(t3
, t1
);
3233 tcg_gen_mul_i32(t2
, t2
, t3
);
3234 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3235 tcg_temp_free_i32(t2
);
3236 tcg_temp_free_i32(t3
);
3241 TCGv_i32 t2
= tcg_temp_new_i32();
3242 TCGv_i32 t3
= tcg_temp_new_i32();
3243 tcg_gen_trunc_tl_i32(t2
, t0
);
3244 tcg_gen_trunc_tl_i32(t3
, t1
);
3245 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3246 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3247 tcg_temp_free_i32(t2
);
3248 tcg_temp_free_i32(t3
);
3253 TCGv_i32 t2
= tcg_temp_new_i32();
3254 TCGv_i32 t3
= tcg_temp_new_i32();
3255 tcg_gen_trunc_tl_i32(t2
, t0
);
3256 tcg_gen_trunc_tl_i32(t3
, t1
);
3257 tcg_gen_mul_i32(t2
, t2
, t3
);
3258 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3259 tcg_temp_free_i32(t2
);
3260 tcg_temp_free_i32(t3
);
3265 TCGv_i32 t2
= tcg_temp_new_i32();
3266 TCGv_i32 t3
= tcg_temp_new_i32();
3267 tcg_gen_trunc_tl_i32(t2
, t0
);
3268 tcg_gen_trunc_tl_i32(t3
, t1
);
3269 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3270 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3271 tcg_temp_free_i32(t2
);
3272 tcg_temp_free_i32(t3
);
3275 #if defined(TARGET_MIPS64)
3278 TCGv t2
= tcg_temp_new();
3279 TCGv t3
= tcg_temp_new();
3280 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3281 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3282 tcg_gen_and_tl(t2
, t2
, t3
);
3283 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3284 tcg_gen_or_tl(t2
, t2
, t3
);
3285 tcg_gen_movi_tl(t3
, 0);
3286 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3287 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3294 TCGv t2
= tcg_temp_new();
3295 TCGv t3
= tcg_temp_new();
3296 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3297 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3298 tcg_gen_and_tl(t2
, t2
, t3
);
3299 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3300 tcg_gen_or_tl(t2
, t2
, t3
);
3301 tcg_gen_movi_tl(t3
, 0);
3302 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3303 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3310 TCGv t2
= tcg_const_tl(0);
3311 TCGv t3
= tcg_const_tl(1);
3312 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3313 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3320 TCGv t2
= tcg_const_tl(0);
3321 TCGv t3
= tcg_const_tl(1);
3322 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3323 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3329 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3333 TCGv t2
= tcg_temp_new();
3334 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3339 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3343 TCGv t2
= tcg_temp_new();
3344 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3350 MIPS_INVAL("r6 mul/div");
3351 generate_exception(ctx
, EXCP_RI
);
3359 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3360 int acc
, int rs
, int rt
)
3364 t0
= tcg_temp_new();
3365 t1
= tcg_temp_new();
3367 gen_load_gpr(t0
, rs
);
3368 gen_load_gpr(t1
, rt
);
3377 TCGv t2
= tcg_temp_new();
3378 TCGv t3
= tcg_temp_new();
3379 tcg_gen_ext32s_tl(t0
, t0
);
3380 tcg_gen_ext32s_tl(t1
, t1
);
3381 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3382 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3383 tcg_gen_and_tl(t2
, t2
, t3
);
3384 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3385 tcg_gen_or_tl(t2
, t2
, t3
);
3386 tcg_gen_movi_tl(t3
, 0);
3387 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3388 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3389 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3390 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3391 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3398 TCGv t2
= tcg_const_tl(0);
3399 TCGv t3
= tcg_const_tl(1);
3400 tcg_gen_ext32u_tl(t0
, t0
);
3401 tcg_gen_ext32u_tl(t1
, t1
);
3402 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3403 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3404 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3405 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3406 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3413 TCGv_i32 t2
= tcg_temp_new_i32();
3414 TCGv_i32 t3
= tcg_temp_new_i32();
3415 tcg_gen_trunc_tl_i32(t2
, t0
);
3416 tcg_gen_trunc_tl_i32(t3
, t1
);
3417 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3418 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3419 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3420 tcg_temp_free_i32(t2
);
3421 tcg_temp_free_i32(t3
);
3426 TCGv_i32 t2
= tcg_temp_new_i32();
3427 TCGv_i32 t3
= tcg_temp_new_i32();
3428 tcg_gen_trunc_tl_i32(t2
, t0
);
3429 tcg_gen_trunc_tl_i32(t3
, t1
);
3430 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3431 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3432 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3433 tcg_temp_free_i32(t2
);
3434 tcg_temp_free_i32(t3
);
3437 #if defined(TARGET_MIPS64)
3440 TCGv t2
= tcg_temp_new();
3441 TCGv t3
= tcg_temp_new();
3442 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3443 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3444 tcg_gen_and_tl(t2
, t2
, t3
);
3445 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3446 tcg_gen_or_tl(t2
, t2
, t3
);
3447 tcg_gen_movi_tl(t3
, 0);
3448 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3449 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3450 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3457 TCGv t2
= tcg_const_tl(0);
3458 TCGv t3
= tcg_const_tl(1);
3459 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3460 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3461 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3467 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3470 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3475 TCGv_i64 t2
= tcg_temp_new_i64();
3476 TCGv_i64 t3
= tcg_temp_new_i64();
3478 tcg_gen_ext_tl_i64(t2
, t0
);
3479 tcg_gen_ext_tl_i64(t3
, t1
);
3480 tcg_gen_mul_i64(t2
, t2
, t3
);
3481 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3482 tcg_gen_add_i64(t2
, t2
, t3
);
3483 tcg_temp_free_i64(t3
);
3484 gen_move_low32(cpu_LO
[acc
], t2
);
3485 gen_move_high32(cpu_HI
[acc
], t2
);
3486 tcg_temp_free_i64(t2
);
3491 TCGv_i64 t2
= tcg_temp_new_i64();
3492 TCGv_i64 t3
= tcg_temp_new_i64();
3494 tcg_gen_ext32u_tl(t0
, t0
);
3495 tcg_gen_ext32u_tl(t1
, t1
);
3496 tcg_gen_extu_tl_i64(t2
, t0
);
3497 tcg_gen_extu_tl_i64(t3
, t1
);
3498 tcg_gen_mul_i64(t2
, t2
, t3
);
3499 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3500 tcg_gen_add_i64(t2
, t2
, t3
);
3501 tcg_temp_free_i64(t3
);
3502 gen_move_low32(cpu_LO
[acc
], t2
);
3503 gen_move_high32(cpu_HI
[acc
], t2
);
3504 tcg_temp_free_i64(t2
);
3509 TCGv_i64 t2
= tcg_temp_new_i64();
3510 TCGv_i64 t3
= tcg_temp_new_i64();
3512 tcg_gen_ext_tl_i64(t2
, t0
);
3513 tcg_gen_ext_tl_i64(t3
, t1
);
3514 tcg_gen_mul_i64(t2
, t2
, t3
);
3515 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3516 tcg_gen_sub_i64(t2
, t3
, t2
);
3517 tcg_temp_free_i64(t3
);
3518 gen_move_low32(cpu_LO
[acc
], t2
);
3519 gen_move_high32(cpu_HI
[acc
], t2
);
3520 tcg_temp_free_i64(t2
);
3525 TCGv_i64 t2
= tcg_temp_new_i64();
3526 TCGv_i64 t3
= tcg_temp_new_i64();
3528 tcg_gen_ext32u_tl(t0
, t0
);
3529 tcg_gen_ext32u_tl(t1
, t1
);
3530 tcg_gen_extu_tl_i64(t2
, t0
);
3531 tcg_gen_extu_tl_i64(t3
, t1
);
3532 tcg_gen_mul_i64(t2
, t2
, t3
);
3533 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3534 tcg_gen_sub_i64(t2
, t3
, t2
);
3535 tcg_temp_free_i64(t3
);
3536 gen_move_low32(cpu_LO
[acc
], t2
);
3537 gen_move_high32(cpu_HI
[acc
], t2
);
3538 tcg_temp_free_i64(t2
);
3542 MIPS_INVAL("mul/div");
3543 generate_exception(ctx
, EXCP_RI
);
3551 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3552 int rd
, int rs
, int rt
)
3554 TCGv t0
= tcg_temp_new();
3555 TCGv t1
= tcg_temp_new();
3557 gen_load_gpr(t0
, rs
);
3558 gen_load_gpr(t1
, rt
);
3561 case OPC_VR54XX_MULS
:
3562 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3564 case OPC_VR54XX_MULSU
:
3565 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MACC
:
3568 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MACCU
:
3571 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MSAC
:
3574 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MSACU
:
3577 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MULHI
:
3580 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULHIU
:
3583 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULSHI
:
3586 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MULSHIU
:
3589 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MACCHI
:
3592 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MACCHIU
:
3595 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MSACHI
:
3598 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3600 case OPC_VR54XX_MSACHIU
:
3601 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3604 MIPS_INVAL("mul vr54xx");
3605 generate_exception(ctx
, EXCP_RI
);
3608 gen_store_gpr(t0
, rd
);
3615 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3624 t0
= tcg_temp_new();
3625 gen_load_gpr(t0
, rs
);
3629 gen_helper_clo(cpu_gpr
[rd
], t0
);
3633 gen_helper_clz(cpu_gpr
[rd
], t0
);
3635 #if defined(TARGET_MIPS64)
3638 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3642 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3649 /* Godson integer instructions */
3650 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3651 int rd
, int rs
, int rt
)
3663 case OPC_MULTU_G_2E
:
3664 case OPC_MULTU_G_2F
:
3665 #if defined(TARGET_MIPS64)
3666 case OPC_DMULT_G_2E
:
3667 case OPC_DMULT_G_2F
:
3668 case OPC_DMULTU_G_2E
:
3669 case OPC_DMULTU_G_2F
:
3671 t0
= tcg_temp_new();
3672 t1
= tcg_temp_new();
3675 t0
= tcg_temp_local_new();
3676 t1
= tcg_temp_local_new();
3680 gen_load_gpr(t0
, rs
);
3681 gen_load_gpr(t1
, rt
);
3686 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3687 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3689 case OPC_MULTU_G_2E
:
3690 case OPC_MULTU_G_2F
:
3691 tcg_gen_ext32u_tl(t0
, t0
);
3692 tcg_gen_ext32u_tl(t1
, t1
);
3693 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3694 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3699 TCGLabel
*l1
= gen_new_label();
3700 TCGLabel
*l2
= gen_new_label();
3701 TCGLabel
*l3
= gen_new_label();
3702 tcg_gen_ext32s_tl(t0
, t0
);
3703 tcg_gen_ext32s_tl(t1
, t1
);
3704 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3705 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3708 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3709 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3710 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3713 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3714 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3721 TCGLabel
*l1
= gen_new_label();
3722 TCGLabel
*l2
= gen_new_label();
3723 tcg_gen_ext32u_tl(t0
, t0
);
3724 tcg_gen_ext32u_tl(t1
, t1
);
3725 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3726 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3729 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3730 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3737 TCGLabel
*l1
= gen_new_label();
3738 TCGLabel
*l2
= gen_new_label();
3739 TCGLabel
*l3
= gen_new_label();
3740 tcg_gen_ext32u_tl(t0
, t0
);
3741 tcg_gen_ext32u_tl(t1
, t1
);
3742 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3743 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3744 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3746 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3749 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3750 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3757 TCGLabel
*l1
= gen_new_label();
3758 TCGLabel
*l2
= gen_new_label();
3759 tcg_gen_ext32u_tl(t0
, t0
);
3760 tcg_gen_ext32u_tl(t1
, t1
);
3761 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3762 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3765 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3766 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3770 #if defined(TARGET_MIPS64)
3771 case OPC_DMULT_G_2E
:
3772 case OPC_DMULT_G_2F
:
3773 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3775 case OPC_DMULTU_G_2E
:
3776 case OPC_DMULTU_G_2F
:
3777 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3782 TCGLabel
*l1
= gen_new_label();
3783 TCGLabel
*l2
= gen_new_label();
3784 TCGLabel
*l3
= gen_new_label();
3785 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3786 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3789 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3790 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3791 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3794 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3798 case OPC_DDIVU_G_2E
:
3799 case OPC_DDIVU_G_2F
:
3801 TCGLabel
*l1
= gen_new_label();
3802 TCGLabel
*l2
= gen_new_label();
3803 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3804 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3807 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3814 TCGLabel
*l1
= gen_new_label();
3815 TCGLabel
*l2
= gen_new_label();
3816 TCGLabel
*l3
= gen_new_label();
3817 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3818 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3819 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3824 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3828 case OPC_DMODU_G_2E
:
3829 case OPC_DMODU_G_2F
:
3831 TCGLabel
*l1
= gen_new_label();
3832 TCGLabel
*l2
= gen_new_label();
3833 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3834 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3837 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3848 /* Loongson multimedia instructions */
3849 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3851 uint32_t opc
, shift_max
;
3854 opc
= MASK_LMI(ctx
->opcode
);
3860 t0
= tcg_temp_local_new_i64();
3861 t1
= tcg_temp_local_new_i64();
3864 t0
= tcg_temp_new_i64();
3865 t1
= tcg_temp_new_i64();
3869 gen_load_fpr64(ctx
, t0
, rs
);
3870 gen_load_fpr64(ctx
, t1
, rt
);
3872 #define LMI_HELPER(UP, LO) \
3873 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3874 #define LMI_HELPER_1(UP, LO) \
3875 case OPC_##UP: gen_helper_##LO(t0, t0); break
3876 #define LMI_DIRECT(UP, LO, OP) \
3877 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3880 LMI_HELPER(PADDSH
, paddsh
);
3881 LMI_HELPER(PADDUSH
, paddush
);
3882 LMI_HELPER(PADDH
, paddh
);
3883 LMI_HELPER(PADDW
, paddw
);
3884 LMI_HELPER(PADDSB
, paddsb
);
3885 LMI_HELPER(PADDUSB
, paddusb
);
3886 LMI_HELPER(PADDB
, paddb
);
3888 LMI_HELPER(PSUBSH
, psubsh
);
3889 LMI_HELPER(PSUBUSH
, psubush
);
3890 LMI_HELPER(PSUBH
, psubh
);
3891 LMI_HELPER(PSUBW
, psubw
);
3892 LMI_HELPER(PSUBSB
, psubsb
);
3893 LMI_HELPER(PSUBUSB
, psubusb
);
3894 LMI_HELPER(PSUBB
, psubb
);
3896 LMI_HELPER(PSHUFH
, pshufh
);
3897 LMI_HELPER(PACKSSWH
, packsswh
);
3898 LMI_HELPER(PACKSSHB
, packsshb
);
3899 LMI_HELPER(PACKUSHB
, packushb
);
3901 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3902 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3903 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3904 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3905 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3906 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3908 LMI_HELPER(PAVGH
, pavgh
);
3909 LMI_HELPER(PAVGB
, pavgb
);
3910 LMI_HELPER(PMAXSH
, pmaxsh
);
3911 LMI_HELPER(PMINSH
, pminsh
);
3912 LMI_HELPER(PMAXUB
, pmaxub
);
3913 LMI_HELPER(PMINUB
, pminub
);
3915 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3916 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3917 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3918 LMI_HELPER(PCMPGTH
, pcmpgth
);
3919 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3920 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3922 LMI_HELPER(PSLLW
, psllw
);
3923 LMI_HELPER(PSLLH
, psllh
);
3924 LMI_HELPER(PSRLW
, psrlw
);
3925 LMI_HELPER(PSRLH
, psrlh
);
3926 LMI_HELPER(PSRAW
, psraw
);
3927 LMI_HELPER(PSRAH
, psrah
);
3929 LMI_HELPER(PMULLH
, pmullh
);
3930 LMI_HELPER(PMULHH
, pmulhh
);
3931 LMI_HELPER(PMULHUH
, pmulhuh
);
3932 LMI_HELPER(PMADDHW
, pmaddhw
);
3934 LMI_HELPER(PASUBUB
, pasubub
);
3935 LMI_HELPER_1(BIADD
, biadd
);
3936 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3938 LMI_DIRECT(PADDD
, paddd
, add
);
3939 LMI_DIRECT(PSUBD
, psubd
, sub
);
3940 LMI_DIRECT(XOR_CP2
, xor, xor);
3941 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3942 LMI_DIRECT(AND_CP2
, and, and);
3943 LMI_DIRECT(PANDN
, pandn
, andc
);
3944 LMI_DIRECT(OR
, or, or);
3947 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3950 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3953 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3956 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3960 tcg_gen_andi_i64(t1
, t1
, 3);
3961 tcg_gen_shli_i64(t1
, t1
, 4);
3962 tcg_gen_shr_i64(t0
, t0
, t1
);
3963 tcg_gen_ext16u_i64(t0
, t0
);
3967 tcg_gen_add_i64(t0
, t0
, t1
);
3968 tcg_gen_ext32s_i64(t0
, t0
);
3971 tcg_gen_sub_i64(t0
, t0
, t1
);
3972 tcg_gen_ext32s_i64(t0
, t0
);
3994 /* Make sure shift count isn't TCG undefined behaviour. */
3995 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4000 tcg_gen_shl_i64(t0
, t0
, t1
);
4004 /* Since SRA is UndefinedResult without sign-extended inputs,
4005 we can treat SRA and DSRA the same. */
4006 tcg_gen_sar_i64(t0
, t0
, t1
);
4009 /* We want to shift in zeros for SRL; zero-extend first. */
4010 tcg_gen_ext32u_i64(t0
, t0
);
4013 tcg_gen_shr_i64(t0
, t0
, t1
);
4017 if (shift_max
== 32) {
4018 tcg_gen_ext32s_i64(t0
, t0
);
4021 /* Shifts larger than MAX produce zero. */
4022 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4023 tcg_gen_neg_i64(t1
, t1
);
4024 tcg_gen_and_i64(t0
, t0
, t1
);
4030 TCGv_i64 t2
= tcg_temp_new_i64();
4031 TCGLabel
*lab
= gen_new_label();
4033 tcg_gen_mov_i64(t2
, t0
);
4034 tcg_gen_add_i64(t0
, t1
, t2
);
4035 if (opc
== OPC_ADD_CP2
) {
4036 tcg_gen_ext32s_i64(t0
, t0
);
4038 tcg_gen_xor_i64(t1
, t1
, t2
);
4039 tcg_gen_xor_i64(t2
, t2
, t0
);
4040 tcg_gen_andc_i64(t1
, t2
, t1
);
4041 tcg_temp_free_i64(t2
);
4042 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4043 generate_exception(ctx
, EXCP_OVERFLOW
);
4051 TCGv_i64 t2
= tcg_temp_new_i64();
4052 TCGLabel
*lab
= gen_new_label();
4054 tcg_gen_mov_i64(t2
, t0
);
4055 tcg_gen_sub_i64(t0
, t1
, t2
);
4056 if (opc
== OPC_SUB_CP2
) {
4057 tcg_gen_ext32s_i64(t0
, t0
);
4059 tcg_gen_xor_i64(t1
, t1
, t2
);
4060 tcg_gen_xor_i64(t2
, t2
, t0
);
4061 tcg_gen_and_i64(t1
, t1
, t2
);
4062 tcg_temp_free_i64(t2
);
4063 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4064 generate_exception(ctx
, EXCP_OVERFLOW
);
4070 tcg_gen_ext32u_i64(t0
, t0
);
4071 tcg_gen_ext32u_i64(t1
, t1
);
4072 tcg_gen_mul_i64(t0
, t0
, t1
);
4081 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4082 FD field is the CC field? */
4084 MIPS_INVAL("loongson_cp2");
4085 generate_exception(ctx
, EXCP_RI
);
4092 gen_store_fpr64(ctx
, t0
, rd
);
4094 tcg_temp_free_i64(t0
);
4095 tcg_temp_free_i64(t1
);
4099 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4100 int rs
, int rt
, int16_t imm
)
4103 TCGv t0
= tcg_temp_new();
4104 TCGv t1
= tcg_temp_new();
4107 /* Load needed operands */
4115 /* Compare two registers */
4117 gen_load_gpr(t0
, rs
);
4118 gen_load_gpr(t1
, rt
);
4128 /* Compare register to immediate */
4129 if (rs
!= 0 || imm
!= 0) {
4130 gen_load_gpr(t0
, rs
);
4131 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4138 case OPC_TEQ
: /* rs == rs */
4139 case OPC_TEQI
: /* r0 == 0 */
4140 case OPC_TGE
: /* rs >= rs */
4141 case OPC_TGEI
: /* r0 >= 0 */
4142 case OPC_TGEU
: /* rs >= rs unsigned */
4143 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4145 generate_exception(ctx
, EXCP_TRAP
);
4147 case OPC_TLT
: /* rs < rs */
4148 case OPC_TLTI
: /* r0 < 0 */
4149 case OPC_TLTU
: /* rs < rs unsigned */
4150 case OPC_TLTIU
: /* r0 < 0 unsigned */
4151 case OPC_TNE
: /* rs != rs */
4152 case OPC_TNEI
: /* r0 != 0 */
4153 /* Never trap: treat as NOP. */
4157 TCGLabel
*l1
= gen_new_label();
4162 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4166 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4170 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4174 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4178 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4182 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4185 generate_exception(ctx
, EXCP_TRAP
);
4192 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4194 TranslationBlock
*tb
;
4196 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4197 likely(!ctx
->singlestep_enabled
)) {
4200 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4203 if (ctx
->singlestep_enabled
) {
4204 save_cpu_state(ctx
, 0);
4205 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
4211 /* Branches (before delay slot) */
4212 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4214 int rs
, int rt
, int32_t offset
,
4217 target_ulong btgt
= -1;
4219 int bcond_compute
= 0;
4220 TCGv t0
= tcg_temp_new();
4221 TCGv t1
= tcg_temp_new();
4223 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4224 #ifdef MIPS_DEBUG_DISAS
4225 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4226 TARGET_FMT_lx
"\n", ctx
->pc
);
4228 generate_exception(ctx
, EXCP_RI
);
4232 /* Load needed operands */
4238 /* Compare two registers */
4240 gen_load_gpr(t0
, rs
);
4241 gen_load_gpr(t1
, rt
);
4244 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4258 /* Compare to zero */
4260 gen_load_gpr(t0
, rs
);
4263 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4266 #if defined(TARGET_MIPS64)
4268 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4270 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4273 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4278 /* Jump to immediate */
4279 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4283 /* Jump to register */
4284 if (offset
!= 0 && offset
!= 16) {
4285 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4286 others are reserved. */
4287 MIPS_INVAL("jump hint");
4288 generate_exception(ctx
, EXCP_RI
);
4291 gen_load_gpr(btarget
, rs
);
4294 MIPS_INVAL("branch/jump");
4295 generate_exception(ctx
, EXCP_RI
);
4298 if (bcond_compute
== 0) {
4299 /* No condition to be computed */
4301 case OPC_BEQ
: /* rx == rx */
4302 case OPC_BEQL
: /* rx == rx likely */
4303 case OPC_BGEZ
: /* 0 >= 0 */
4304 case OPC_BGEZL
: /* 0 >= 0 likely */
4305 case OPC_BLEZ
: /* 0 <= 0 */
4306 case OPC_BLEZL
: /* 0 <= 0 likely */
4308 ctx
->hflags
|= MIPS_HFLAG_B
;
4310 case OPC_BGEZAL
: /* 0 >= 0 */
4311 case OPC_BGEZALL
: /* 0 >= 0 likely */
4312 /* Always take and link */
4314 ctx
->hflags
|= MIPS_HFLAG_B
;
4316 case OPC_BNE
: /* rx != rx */
4317 case OPC_BGTZ
: /* 0 > 0 */
4318 case OPC_BLTZ
: /* 0 < 0 */
4321 case OPC_BLTZAL
: /* 0 < 0 */
4322 /* Handle as an unconditional branch to get correct delay
4325 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4326 ctx
->hflags
|= MIPS_HFLAG_B
;
4328 case OPC_BLTZALL
: /* 0 < 0 likely */
4329 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4330 /* Skip the instruction in the delay slot */
4333 case OPC_BNEL
: /* rx != rx likely */
4334 case OPC_BGTZL
: /* 0 > 0 likely */
4335 case OPC_BLTZL
: /* 0 < 0 likely */
4336 /* Skip the instruction in the delay slot */
4340 ctx
->hflags
|= MIPS_HFLAG_B
;
4343 ctx
->hflags
|= MIPS_HFLAG_BX
;
4347 ctx
->hflags
|= MIPS_HFLAG_B
;
4350 ctx
->hflags
|= MIPS_HFLAG_BR
;
4354 ctx
->hflags
|= MIPS_HFLAG_BR
;
4357 MIPS_INVAL("branch/jump");
4358 generate_exception(ctx
, EXCP_RI
);
4364 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4367 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4370 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4373 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4376 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4379 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4382 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4386 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4390 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4393 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4396 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4399 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4402 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4405 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4408 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4410 #if defined(TARGET_MIPS64)
4412 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4416 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4419 ctx
->hflags
|= MIPS_HFLAG_BC
;
4422 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4425 ctx
->hflags
|= MIPS_HFLAG_BL
;
4428 MIPS_INVAL("conditional branch/jump");
4429 generate_exception(ctx
, EXCP_RI
);
4434 ctx
->btarget
= btgt
;
4436 switch (delayslot_size
) {
4438 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4441 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4446 int post_delay
= insn_bytes
+ delayslot_size
;
4447 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4449 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4453 if (insn_bytes
== 2)
4454 ctx
->hflags
|= MIPS_HFLAG_B16
;
4459 /* special3 bitfield operations */
4460 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4461 int rs
, int lsb
, int msb
)
4463 TCGv t0
= tcg_temp_new();
4464 TCGv t1
= tcg_temp_new();
4466 gen_load_gpr(t1
, rs
);
4469 if (lsb
+ msb
> 31) {
4472 tcg_gen_shri_tl(t0
, t1
, lsb
);
4474 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4476 tcg_gen_ext32s_tl(t0
, t0
);
4479 #if defined(TARGET_MIPS64)
4488 if (lsb
+ msb
> 63) {
4491 tcg_gen_shri_tl(t0
, t1
, lsb
);
4493 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4501 gen_load_gpr(t0
, rt
);
4502 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4503 tcg_gen_ext32s_tl(t0
, t0
);
4505 #if defined(TARGET_MIPS64)
4516 gen_load_gpr(t0
, rt
);
4517 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4522 MIPS_INVAL("bitops");
4523 generate_exception(ctx
, EXCP_RI
);
4528 gen_store_gpr(t0
, rt
);
4533 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4538 /* If no destination, treat it as a NOP. */
4542 t0
= tcg_temp_new();
4543 gen_load_gpr(t0
, rt
);
4547 TCGv t1
= tcg_temp_new();
4549 tcg_gen_shri_tl(t1
, t0
, 8);
4550 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4551 tcg_gen_shli_tl(t0
, t0
, 8);
4552 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4553 tcg_gen_or_tl(t0
, t0
, t1
);
4555 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4559 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4562 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4564 #if defined(TARGET_MIPS64)
4567 TCGv t1
= tcg_temp_new();
4569 tcg_gen_shri_tl(t1
, t0
, 8);
4570 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4571 tcg_gen_shli_tl(t0
, t0
, 8);
4572 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4573 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4579 TCGv t1
= tcg_temp_new();
4581 tcg_gen_shri_tl(t1
, t0
, 16);
4582 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4583 tcg_gen_shli_tl(t0
, t0
, 16);
4584 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4585 tcg_gen_or_tl(t0
, t0
, t1
);
4586 tcg_gen_shri_tl(t1
, t0
, 32);
4587 tcg_gen_shli_tl(t0
, t0
, 32);
4588 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4594 MIPS_INVAL("bsfhl");
4595 generate_exception(ctx
, EXCP_RI
);
4602 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4611 t0
= tcg_temp_new();
4612 t1
= tcg_temp_new();
4613 gen_load_gpr(t0
, rs
);
4614 gen_load_gpr(t1
, rt
);
4615 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4616 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4617 if (opc
== OPC_LSA
) {
4618 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4627 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4635 t0
= tcg_temp_new();
4636 gen_load_gpr(t0
, rt
);
4638 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4640 TCGv t1
= tcg_temp_new();
4641 gen_load_gpr(t1
, rs
);
4645 TCGv_i64 t2
= tcg_temp_new_i64();
4646 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4647 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4648 gen_move_low32(cpu_gpr
[rd
], t2
);
4649 tcg_temp_free_i64(t2
);
4652 #if defined(TARGET_MIPS64)
4654 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4655 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4656 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4666 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4673 t0
= tcg_temp_new();
4674 gen_load_gpr(t0
, rt
);
4677 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4679 #if defined(TARGET_MIPS64)
4681 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4688 #ifndef CONFIG_USER_ONLY
4689 /* CP0 (MMU and control) */
4690 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4692 TCGv_i64 t0
= tcg_temp_new_i64();
4693 TCGv_i64 t1
= tcg_temp_new_i64();
4695 tcg_gen_ext_tl_i64(t0
, arg
);
4696 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4697 #if defined(TARGET_MIPS64)
4698 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4700 tcg_gen_concat32_i64(t1
, t1
, t0
);
4702 tcg_gen_st_i64(t1
, cpu_env
, off
);
4703 tcg_temp_free_i64(t1
);
4704 tcg_temp_free_i64(t0
);
4707 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4709 TCGv_i64 t0
= tcg_temp_new_i64();
4710 TCGv_i64 t1
= tcg_temp_new_i64();
4712 tcg_gen_ext_tl_i64(t0
, arg
);
4713 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4714 tcg_gen_concat32_i64(t1
, t1
, t0
);
4715 tcg_gen_st_i64(t1
, cpu_env
, off
);
4716 tcg_temp_free_i64(t1
);
4717 tcg_temp_free_i64(t0
);
4720 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4722 TCGv_i64 t0
= tcg_temp_new_i64();
4724 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4725 #if defined(TARGET_MIPS64)
4726 tcg_gen_shri_i64(t0
, t0
, 30);
4728 tcg_gen_shri_i64(t0
, t0
, 32);
4730 gen_move_low32(arg
, t0
);
4731 tcg_temp_free_i64(t0
);
4734 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4736 TCGv_i64 t0
= tcg_temp_new_i64();
4738 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4739 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4740 gen_move_low32(arg
, t0
);
4741 tcg_temp_free_i64(t0
);
4744 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4746 TCGv_i32 t0
= tcg_temp_new_i32();
4748 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4749 tcg_gen_ext_i32_tl(arg
, t0
);
4750 tcg_temp_free_i32(t0
);
4753 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4755 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4756 tcg_gen_ext32s_tl(arg
, arg
);
4759 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4761 TCGv_i32 t0
= tcg_temp_new_i32();
4763 tcg_gen_trunc_tl_i32(t0
, arg
);
4764 tcg_gen_st_i32(t0
, cpu_env
, off
);
4765 tcg_temp_free_i32(t0
);
4768 static inline void gen_mtc0_store64 (TCGv arg
, target_ulong off
)
4770 tcg_gen_ext32s_tl(arg
, arg
);
4771 tcg_gen_st_tl(arg
, cpu_env
, off
);
4774 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4776 const char *rn
= "invalid";
4778 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4779 goto mfhc0_read_zero
;
4786 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4790 goto mfhc0_read_zero
;
4796 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4800 goto mfhc0_read_zero
;
4806 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4807 ctx
->CP0_LLAddr_shift
);
4811 goto mfhc0_read_zero
;
4820 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4824 goto mfhc0_read_zero
;
4828 goto mfhc0_read_zero
;
4831 (void)rn
; /* avoid a compiler warning */
4832 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4836 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4837 tcg_gen_movi_tl(arg
, 0);
4840 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4842 const char *rn
= "invalid";
4843 uint64_t mask
= ctx
->PAMask
>> 36;
4845 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4853 tcg_gen_andi_tl(arg
, arg
, mask
);
4854 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4864 tcg_gen_andi_tl(arg
, arg
, mask
);
4865 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4875 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4876 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4877 relevant for modern MIPS cores supporting MTHC0, therefore
4878 treating MTHC0 to LLAddr as NOP. */
4891 tcg_gen_andi_tl(arg
, arg
, mask
);
4892 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4903 (void)rn
; /* avoid a compiler warning */
4905 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4908 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4910 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4911 tcg_gen_movi_tl(arg
, 0);
4913 tcg_gen_movi_tl(arg
, ~0);
/* Guard used inside gen_{m,dm}{f,t}c0: if the capability expression
 * 'c' is false, bail out to the function's cp0_unimplemented label.
 * do/while(0) wrapper reconstructed (stripped in the corrupted
 * source text). */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4924 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4926 const char *rn
= "invalid";
4929 check_insn(ctx
, ISA_MIPS32
);
4935 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4939 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4940 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4944 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4945 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4949 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4950 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4954 goto cp0_unimplemented
;
4960 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4961 gen_helper_mfc0_random(arg
, cpu_env
);
4965 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4970 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4971 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4975 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4976 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4980 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4981 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4985 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4986 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4990 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4991 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4992 rn
= "VPEScheFBack";
4995 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4996 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5000 goto cp0_unimplemented
;
5007 TCGv_i64 tmp
= tcg_temp_new_i64();
5008 tcg_gen_ld_i64(tmp
, cpu_env
,
5009 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5010 #if defined(TARGET_MIPS64)
5012 /* Move RI/XI fields to bits 31:30 */
5013 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5014 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5017 gen_move_low32(arg
, tmp
);
5018 tcg_temp_free_i64(tmp
);
5023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5024 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5028 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5029 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5033 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5034 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5038 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5039 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5043 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5044 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5049 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5053 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5054 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5058 goto cp0_unimplemented
;
5065 TCGv_i64 tmp
= tcg_temp_new_i64();
5066 tcg_gen_ld_i64(tmp
, cpu_env
,
5067 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5068 #if defined(TARGET_MIPS64)
5070 /* Move RI/XI fields to bits 31:30 */
5071 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5072 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5075 gen_move_low32(arg
, tmp
);
5076 tcg_temp_free_i64(tmp
);
5081 goto cp0_unimplemented
;
5087 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5088 tcg_gen_ext32s_tl(arg
, arg
);
5092 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5093 rn
= "ContextConfig";
5094 goto cp0_unimplemented
;
5097 CP0_CHECK(ctx
->ulri
);
5098 tcg_gen_ld32s_tl(arg
, cpu_env
,
5099 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5103 goto cp0_unimplemented
;
5109 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5113 check_insn(ctx
, ISA_MIPS32R2
);
5114 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5118 goto cp0_unimplemented
;
5124 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5128 check_insn(ctx
, ISA_MIPS32R2
);
5129 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5133 check_insn(ctx
, ISA_MIPS32R2
);
5134 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5138 check_insn(ctx
, ISA_MIPS32R2
);
5139 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5143 check_insn(ctx
, ISA_MIPS32R2
);
5144 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5148 check_insn(ctx
, ISA_MIPS32R2
);
5149 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5153 goto cp0_unimplemented
;
5159 check_insn(ctx
, ISA_MIPS32R2
);
5160 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5164 goto cp0_unimplemented
;
5170 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5171 tcg_gen_ext32s_tl(arg
, arg
);
5176 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5181 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5185 goto cp0_unimplemented
;
5191 /* Mark as an IO operation because we read the time. */
5192 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5195 gen_helper_mfc0_count(arg
, cpu_env
);
5196 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5199 /* Break the TB to be able to take timer interrupts immediately
5200 after reading count. */
5201 ctx
->bstate
= BS_STOP
;
5204 /* 6,7 are implementation dependent */
5206 goto cp0_unimplemented
;
5212 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5213 tcg_gen_ext32s_tl(arg
, arg
);
5217 goto cp0_unimplemented
;
5223 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5226 /* 6,7 are implementation dependent */
5228 goto cp0_unimplemented
;
5234 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5238 check_insn(ctx
, ISA_MIPS32R2
);
5239 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5243 check_insn(ctx
, ISA_MIPS32R2
);
5244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5248 check_insn(ctx
, ISA_MIPS32R2
);
5249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5253 goto cp0_unimplemented
;
5259 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5263 goto cp0_unimplemented
;
5269 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5270 tcg_gen_ext32s_tl(arg
, arg
);
5274 goto cp0_unimplemented
;
5280 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5284 check_insn(ctx
, ISA_MIPS32R2
);
5285 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5289 goto cp0_unimplemented
;
5295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5307 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5311 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5315 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5318 /* 6,7 are implementation dependent */
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5328 goto cp0_unimplemented
;
5334 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5338 goto cp0_unimplemented
;
5344 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5348 goto cp0_unimplemented
;
5354 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5358 goto cp0_unimplemented
;
5364 #if defined(TARGET_MIPS64)
5365 check_insn(ctx
, ISA_MIPS3
);
5366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5367 tcg_gen_ext32s_tl(arg
, arg
);
5372 goto cp0_unimplemented
;
5376 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5377 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5384 goto cp0_unimplemented
;
5388 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5389 rn
= "'Diagnostic"; /* implementation dependent */
5394 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5398 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5399 rn
= "TraceControl";
5402 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5403 rn
= "TraceControl2";
5406 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5407 rn
= "UserTraceData";
5410 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5414 goto cp0_unimplemented
;
5421 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5422 tcg_gen_ext32s_tl(arg
, arg
);
5426 goto cp0_unimplemented
;
5432 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5433 rn
= "Performance0";
5436 // gen_helper_mfc0_performance1(arg);
5437 rn
= "Performance1";
5440 // gen_helper_mfc0_performance2(arg);
5441 rn
= "Performance2";
5444 // gen_helper_mfc0_performance3(arg);
5445 rn
= "Performance3";
5448 // gen_helper_mfc0_performance4(arg);
5449 rn
= "Performance4";
5452 // gen_helper_mfc0_performance5(arg);
5453 rn
= "Performance5";
5456 // gen_helper_mfc0_performance6(arg);
5457 rn
= "Performance6";
5460 // gen_helper_mfc0_performance7(arg);
5461 rn
= "Performance7";
5464 goto cp0_unimplemented
;
5468 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5474 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5478 goto cp0_unimplemented
;
5488 TCGv_i64 tmp
= tcg_temp_new_i64();
5489 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5490 gen_move_low32(arg
, tmp
);
5491 tcg_temp_free_i64(tmp
);
5499 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5503 goto cp0_unimplemented
;
5512 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5523 goto cp0_unimplemented
;
5529 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5530 tcg_gen_ext32s_tl(arg
, arg
);
5534 goto cp0_unimplemented
;
5541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5545 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5546 tcg_gen_ld_tl(arg
, cpu_env
,
5547 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5548 tcg_gen_ext32s_tl(arg
, arg
);
5552 goto cp0_unimplemented
;
5556 goto cp0_unimplemented
;
5558 (void)rn
; /* avoid a compiler warning */
5559 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5563 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5564 gen_mfc0_unimplemented(ctx
, arg
);
5567 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5569 const char *rn
= "invalid";
5572 check_insn(ctx
, ISA_MIPS32
);
5574 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5582 gen_helper_mtc0_index(cpu_env
, arg
);
5586 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5587 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5596 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5601 goto cp0_unimplemented
;
5611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5612 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5617 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5621 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5622 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5627 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5632 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5636 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5637 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5638 rn
= "VPEScheFBack";
5641 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5642 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5646 goto cp0_unimplemented
;
5652 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5656 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5657 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5661 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5662 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5666 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5667 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5671 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5672 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5677 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5681 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5682 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5687 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5691 goto cp0_unimplemented
;
5697 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5701 goto cp0_unimplemented
;
5707 gen_helper_mtc0_context(cpu_env
, arg
);
5711 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5712 rn
= "ContextConfig";
5713 goto cp0_unimplemented
;
5716 CP0_CHECK(ctx
->ulri
);
5717 tcg_gen_st_tl(arg
, cpu_env
,
5718 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5722 goto cp0_unimplemented
;
5728 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5732 check_insn(ctx
, ISA_MIPS32R2
);
5733 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5735 ctx
->bstate
= BS_STOP
;
5738 goto cp0_unimplemented
;
5744 gen_helper_mtc0_wired(cpu_env
, arg
);
5748 check_insn(ctx
, ISA_MIPS32R2
);
5749 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5753 check_insn(ctx
, ISA_MIPS32R2
);
5754 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5758 check_insn(ctx
, ISA_MIPS32R2
);
5759 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5763 check_insn(ctx
, ISA_MIPS32R2
);
5764 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5768 check_insn(ctx
, ISA_MIPS32R2
);
5769 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5773 goto cp0_unimplemented
;
5779 check_insn(ctx
, ISA_MIPS32R2
);
5780 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5781 ctx
->bstate
= BS_STOP
;
5785 goto cp0_unimplemented
;
5803 goto cp0_unimplemented
;
5809 gen_helper_mtc0_count(cpu_env
, arg
);
5812 /* 6,7 are implementation dependent */
5814 goto cp0_unimplemented
;
5820 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5824 goto cp0_unimplemented
;
5830 gen_helper_mtc0_compare(cpu_env
, arg
);
5833 /* 6,7 are implementation dependent */
5835 goto cp0_unimplemented
;
5841 save_cpu_state(ctx
, 1);
5842 gen_helper_mtc0_status(cpu_env
, arg
);
5843 /* BS_STOP isn't good enough here, hflags may have changed. */
5844 gen_save_pc(ctx
->pc
+ 4);
5845 ctx
->bstate
= BS_EXCP
;
5849 check_insn(ctx
, ISA_MIPS32R2
);
5850 gen_helper_mtc0_intctl(cpu_env
, arg
);
5851 /* Stop translation as we may have switched the execution mode */
5852 ctx
->bstate
= BS_STOP
;
5856 check_insn(ctx
, ISA_MIPS32R2
);
5857 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5858 /* Stop translation as we may have switched the execution mode */
5859 ctx
->bstate
= BS_STOP
;
5863 check_insn(ctx
, ISA_MIPS32R2
);
5864 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5865 /* Stop translation as we may have switched the execution mode */
5866 ctx
->bstate
= BS_STOP
;
5870 goto cp0_unimplemented
;
5876 save_cpu_state(ctx
, 1);
5877 gen_helper_mtc0_cause(cpu_env
, arg
);
5881 goto cp0_unimplemented
;
5887 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_EPC
));
5891 goto cp0_unimplemented
;
5901 check_insn(ctx
, ISA_MIPS32R2
);
5902 gen_helper_mtc0_ebase(cpu_env
, arg
);
5906 goto cp0_unimplemented
;
5912 gen_helper_mtc0_config0(cpu_env
, arg
);
5914 /* Stop translation as we may have switched the execution mode */
5915 ctx
->bstate
= BS_STOP
;
5918 /* ignored, read only */
5922 gen_helper_mtc0_config2(cpu_env
, arg
);
5924 /* Stop translation as we may have switched the execution mode */
5925 ctx
->bstate
= BS_STOP
;
5928 gen_helper_mtc0_config3(cpu_env
, arg
);
5930 /* Stop translation as we may have switched the execution mode */
5931 ctx
->bstate
= BS_STOP
;
5934 gen_helper_mtc0_config4(cpu_env
, arg
);
5936 ctx
->bstate
= BS_STOP
;
5939 gen_helper_mtc0_config5(cpu_env
, arg
);
5941 /* Stop translation as we may have switched the execution mode */
5942 ctx
->bstate
= BS_STOP
;
5944 /* 6,7 are implementation dependent */
5954 rn
= "Invalid config selector";
5955 goto cp0_unimplemented
;
5961 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5965 goto cp0_unimplemented
;
5971 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5975 goto cp0_unimplemented
;
5981 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5985 goto cp0_unimplemented
;
5991 #if defined(TARGET_MIPS64)
5992 check_insn(ctx
, ISA_MIPS3
);
5993 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5998 goto cp0_unimplemented
;
6002 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6003 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6006 gen_helper_mtc0_framemask(cpu_env
, arg
);
6010 goto cp0_unimplemented
;
6015 rn
= "Diagnostic"; /* implementation dependent */
6020 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6021 /* BS_STOP isn't good enough here, hflags may have changed. */
6022 gen_save_pc(ctx
->pc
+ 4);
6023 ctx
->bstate
= BS_EXCP
;
6027 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6028 rn
= "TraceControl";
6029 /* Stop translation as we may have switched the execution mode */
6030 ctx
->bstate
= BS_STOP
;
6033 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6034 rn
= "TraceControl2";
6035 /* Stop translation as we may have switched the execution mode */
6036 ctx
->bstate
= BS_STOP
;
6039 /* Stop translation as we may have switched the execution mode */
6040 ctx
->bstate
= BS_STOP
;
6041 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6042 rn
= "UserTraceData";
6043 /* Stop translation as we may have switched the execution mode */
6044 ctx
->bstate
= BS_STOP
;
6047 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6048 /* Stop translation as we may have switched the execution mode */
6049 ctx
->bstate
= BS_STOP
;
6053 goto cp0_unimplemented
;
6060 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_DEPC
));
6064 goto cp0_unimplemented
;
6070 gen_helper_mtc0_performance0(cpu_env
, arg
);
6071 rn
= "Performance0";
6074 // gen_helper_mtc0_performance1(arg);
6075 rn
= "Performance1";
6078 // gen_helper_mtc0_performance2(arg);
6079 rn
= "Performance2";
6082 // gen_helper_mtc0_performance3(arg);
6083 rn
= "Performance3";
6086 // gen_helper_mtc0_performance4(arg);
6087 rn
= "Performance4";
6090 // gen_helper_mtc0_performance5(arg);
6091 rn
= "Performance5";
6094 // gen_helper_mtc0_performance6(arg);
6095 rn
= "Performance6";
6098 // gen_helper_mtc0_performance7(arg);
6099 rn
= "Performance7";
6102 goto cp0_unimplemented
;
6116 goto cp0_unimplemented
;
6125 gen_helper_mtc0_taglo(cpu_env
, arg
);
6132 gen_helper_mtc0_datalo(cpu_env
, arg
);
6136 goto cp0_unimplemented
;
6145 gen_helper_mtc0_taghi(cpu_env
, arg
);
6152 gen_helper_mtc0_datahi(cpu_env
, arg
);
6157 goto cp0_unimplemented
;
6163 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6167 goto cp0_unimplemented
;
6174 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6178 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6179 tcg_gen_st_tl(arg
, cpu_env
,
6180 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6184 goto cp0_unimplemented
;
6186 /* Stop translation as we may have switched the execution mode */
6187 ctx
->bstate
= BS_STOP
;
6190 goto cp0_unimplemented
;
6192 (void)rn
; /* avoid a compiler warning */
6193 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6194 /* For simplicity assume that all writes can cause interrupts. */
6195 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6197 ctx
->bstate
= BS_STOP
;
6202 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6205 #if defined(TARGET_MIPS64)
6206 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6208 const char *rn
= "invalid";
6211 check_insn(ctx
, ISA_MIPS64
);
6217 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6221 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6222 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6226 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6227 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6231 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6232 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6236 goto cp0_unimplemented
;
6242 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6243 gen_helper_mfc0_random(arg
, cpu_env
);
6247 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6252 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6257 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6262 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6263 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6268 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6272 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6273 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6274 rn
= "VPEScheFBack";
6277 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6278 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6282 goto cp0_unimplemented
;
6288 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6293 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6298 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6303 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6308 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6313 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6317 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6318 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6322 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6323 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6327 goto cp0_unimplemented
;
6333 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6337 goto cp0_unimplemented
;
6343 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6347 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6348 rn
= "ContextConfig";
6349 goto cp0_unimplemented
;
6352 CP0_CHECK(ctx
->ulri
);
6353 tcg_gen_ld_tl(arg
, cpu_env
,
6354 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6358 goto cp0_unimplemented
;
6364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6368 check_insn(ctx
, ISA_MIPS32R2
);
6369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6373 goto cp0_unimplemented
;
6379 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6383 check_insn(ctx
, ISA_MIPS32R2
);
6384 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6388 check_insn(ctx
, ISA_MIPS32R2
);
6389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6393 check_insn(ctx
, ISA_MIPS32R2
);
6394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6398 check_insn(ctx
, ISA_MIPS32R2
);
6399 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6403 check_insn(ctx
, ISA_MIPS32R2
);
6404 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6408 goto cp0_unimplemented
;
6414 check_insn(ctx
, ISA_MIPS32R2
);
6415 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6419 goto cp0_unimplemented
;
6425 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6430 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6435 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6439 goto cp0_unimplemented
;
6445 /* Mark as an IO operation because we read the time. */
6446 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6449 gen_helper_mfc0_count(arg
, cpu_env
);
6450 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6453 /* Break the TB to be able to take timer interrupts immediately
6454 after reading count. */
6455 ctx
->bstate
= BS_STOP
;
6458 /* 6,7 are implementation dependent */
6460 goto cp0_unimplemented
;
6466 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6470 goto cp0_unimplemented
;
6476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6479 /* 6,7 are implementation dependent */
6481 goto cp0_unimplemented
;
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6491 check_insn(ctx
, ISA_MIPS32R2
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6496 check_insn(ctx
, ISA_MIPS32R2
);
6497 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6501 check_insn(ctx
, ISA_MIPS32R2
);
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6506 goto cp0_unimplemented
;
6512 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6516 goto cp0_unimplemented
;
6522 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6526 goto cp0_unimplemented
;
6532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6536 check_insn(ctx
, ISA_MIPS32R2
);
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6541 goto cp0_unimplemented
;
6547 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6551 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6555 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6559 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6563 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6570 /* 6,7 are implementation dependent */
6572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6576 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6580 goto cp0_unimplemented
;
6586 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6590 goto cp0_unimplemented
;
6596 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6600 goto cp0_unimplemented
;
6606 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6610 goto cp0_unimplemented
;
6616 check_insn(ctx
, ISA_MIPS3
);
6617 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6621 goto cp0_unimplemented
;
6625 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6626 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6633 goto cp0_unimplemented
;
6637 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6638 rn
= "'Diagnostic"; /* implementation dependent */
6643 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6647 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6648 rn
= "TraceControl";
6651 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6652 rn
= "TraceControl2";
6655 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6656 rn
= "UserTraceData";
6659 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6663 goto cp0_unimplemented
;
6670 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6674 goto cp0_unimplemented
;
6680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6681 rn
= "Performance0";
6684 // gen_helper_dmfc0_performance1(arg);
6685 rn
= "Performance1";
6688 // gen_helper_dmfc0_performance2(arg);
6689 rn
= "Performance2";
6692 // gen_helper_dmfc0_performance3(arg);
6693 rn
= "Performance3";
6696 // gen_helper_dmfc0_performance4(arg);
6697 rn
= "Performance4";
6700 // gen_helper_dmfc0_performance5(arg);
6701 rn
= "Performance5";
6704 // gen_helper_dmfc0_performance6(arg);
6705 rn
= "Performance6";
6708 // gen_helper_dmfc0_performance7(arg);
6709 rn
= "Performance7";
6712 goto cp0_unimplemented
;
6716 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6723 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6727 goto cp0_unimplemented
;
6736 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6743 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6747 goto cp0_unimplemented
;
6756 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6763 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6767 goto cp0_unimplemented
;
6773 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6777 goto cp0_unimplemented
;
6784 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6788 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6789 tcg_gen_ld_tl(arg
, cpu_env
,
6790 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6794 goto cp0_unimplemented
;
6798 goto cp0_unimplemented
;
6800 (void)rn
; /* avoid a compiler warning */
6801 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6805 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6806 gen_mfc0_unimplemented(ctx
, arg
);
6809 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6811 const char *rn
= "invalid";
6814 check_insn(ctx
, ISA_MIPS64
);
6816 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6824 gen_helper_mtc0_index(cpu_env
, arg
);
6828 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6829 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6833 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6838 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6843 goto cp0_unimplemented
;
6853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6854 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6858 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6874 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6878 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6879 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6880 rn
= "VPEScheFBack";
6883 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6884 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6888 goto cp0_unimplemented
;
6894 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6898 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6899 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6904 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6909 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6914 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6918 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6919 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6923 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6924 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6928 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6929 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6933 goto cp0_unimplemented
;
6939 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6943 goto cp0_unimplemented
;
6949 gen_helper_mtc0_context(cpu_env
, arg
);
6953 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6954 rn
= "ContextConfig";
6955 goto cp0_unimplemented
;
6958 CP0_CHECK(ctx
->ulri
);
6959 tcg_gen_st_tl(arg
, cpu_env
,
6960 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6964 goto cp0_unimplemented
;
6970 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6974 check_insn(ctx
, ISA_MIPS32R2
);
6975 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6979 goto cp0_unimplemented
;
6985 gen_helper_mtc0_wired(cpu_env
, arg
);
6989 check_insn(ctx
, ISA_MIPS32R2
);
6990 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6994 check_insn(ctx
, ISA_MIPS32R2
);
6995 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6999 check_insn(ctx
, ISA_MIPS32R2
);
7000 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7004 check_insn(ctx
, ISA_MIPS32R2
);
7005 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7009 check_insn(ctx
, ISA_MIPS32R2
);
7010 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7014 goto cp0_unimplemented
;
7020 check_insn(ctx
, ISA_MIPS32R2
);
7021 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7022 ctx
->bstate
= BS_STOP
;
7026 goto cp0_unimplemented
;
7044 goto cp0_unimplemented
;
7050 gen_helper_mtc0_count(cpu_env
, arg
);
7053 /* 6,7 are implementation dependent */
7055 goto cp0_unimplemented
;
7057 /* Stop translation as we may have switched the execution mode */
7058 ctx
->bstate
= BS_STOP
;
7063 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7067 goto cp0_unimplemented
;
7073 gen_helper_mtc0_compare(cpu_env
, arg
);
7076 /* 6,7 are implementation dependent */
7078 goto cp0_unimplemented
;
7080 /* Stop translation as we may have switched the execution mode */
7081 ctx
->bstate
= BS_STOP
;
7086 save_cpu_state(ctx
, 1);
7087 gen_helper_mtc0_status(cpu_env
, arg
);
7088 /* BS_STOP isn't good enough here, hflags may have changed. */
7089 gen_save_pc(ctx
->pc
+ 4);
7090 ctx
->bstate
= BS_EXCP
;
7094 check_insn(ctx
, ISA_MIPS32R2
);
7095 gen_helper_mtc0_intctl(cpu_env
, arg
);
7096 /* Stop translation as we may have switched the execution mode */
7097 ctx
->bstate
= BS_STOP
;
7101 check_insn(ctx
, ISA_MIPS32R2
);
7102 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7103 /* Stop translation as we may have switched the execution mode */
7104 ctx
->bstate
= BS_STOP
;
7108 check_insn(ctx
, ISA_MIPS32R2
);
7109 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7110 /* Stop translation as we may have switched the execution mode */
7111 ctx
->bstate
= BS_STOP
;
7115 goto cp0_unimplemented
;
7121 save_cpu_state(ctx
, 1);
7122 /* Mark as an IO operation because we may trigger a software
7124 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7127 gen_helper_mtc0_cause(cpu_env
, arg
);
7128 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7131 /* Stop translation as we may have triggered an intetrupt */
7132 ctx
->bstate
= BS_STOP
;
7136 goto cp0_unimplemented
;
7142 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7146 goto cp0_unimplemented
;
7156 check_insn(ctx
, ISA_MIPS32R2
);
7157 gen_helper_mtc0_ebase(cpu_env
, arg
);
7161 goto cp0_unimplemented
;
7167 gen_helper_mtc0_config0(cpu_env
, arg
);
7169 /* Stop translation as we may have switched the execution mode */
7170 ctx
->bstate
= BS_STOP
;
7173 /* ignored, read only */
7177 gen_helper_mtc0_config2(cpu_env
, arg
);
7179 /* Stop translation as we may have switched the execution mode */
7180 ctx
->bstate
= BS_STOP
;
7183 gen_helper_mtc0_config3(cpu_env
, arg
);
7185 /* Stop translation as we may have switched the execution mode */
7186 ctx
->bstate
= BS_STOP
;
7189 /* currently ignored */
7193 gen_helper_mtc0_config5(cpu_env
, arg
);
7195 /* Stop translation as we may have switched the execution mode */
7196 ctx
->bstate
= BS_STOP
;
7198 /* 6,7 are implementation dependent */
7200 rn
= "Invalid config selector";
7201 goto cp0_unimplemented
;
7207 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7211 goto cp0_unimplemented
;
7217 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7221 goto cp0_unimplemented
;
7227 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7231 goto cp0_unimplemented
;
7237 check_insn(ctx
, ISA_MIPS3
);
7238 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7242 goto cp0_unimplemented
;
7246 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7247 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7250 gen_helper_mtc0_framemask(cpu_env
, arg
);
7254 goto cp0_unimplemented
;
7259 rn
= "Diagnostic"; /* implementation dependent */
7264 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7265 /* BS_STOP isn't good enough here, hflags may have changed. */
7266 gen_save_pc(ctx
->pc
+ 4);
7267 ctx
->bstate
= BS_EXCP
;
7271 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7272 /* Stop translation as we may have switched the execution mode */
7273 ctx
->bstate
= BS_STOP
;
7274 rn
= "TraceControl";
7277 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7278 /* Stop translation as we may have switched the execution mode */
7279 ctx
->bstate
= BS_STOP
;
7280 rn
= "TraceControl2";
7283 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7284 /* Stop translation as we may have switched the execution mode */
7285 ctx
->bstate
= BS_STOP
;
7286 rn
= "UserTraceData";
7289 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7290 /* Stop translation as we may have switched the execution mode */
7291 ctx
->bstate
= BS_STOP
;
7295 goto cp0_unimplemented
;
7302 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7306 goto cp0_unimplemented
;
7312 gen_helper_mtc0_performance0(cpu_env
, arg
);
7313 rn
= "Performance0";
7316 // gen_helper_mtc0_performance1(cpu_env, arg);
7317 rn
= "Performance1";
7320 // gen_helper_mtc0_performance2(cpu_env, arg);
7321 rn
= "Performance2";
7324 // gen_helper_mtc0_performance3(cpu_env, arg);
7325 rn
= "Performance3";
7328 // gen_helper_mtc0_performance4(cpu_env, arg);
7329 rn
= "Performance4";
7332 // gen_helper_mtc0_performance5(cpu_env, arg);
7333 rn
= "Performance5";
7336 // gen_helper_mtc0_performance6(cpu_env, arg);
7337 rn
= "Performance6";
7340 // gen_helper_mtc0_performance7(cpu_env, arg);
7341 rn
= "Performance7";
7344 goto cp0_unimplemented
;
7358 goto cp0_unimplemented
;
7367 gen_helper_mtc0_taglo(cpu_env
, arg
);
7374 gen_helper_mtc0_datalo(cpu_env
, arg
);
7378 goto cp0_unimplemented
;
7387 gen_helper_mtc0_taghi(cpu_env
, arg
);
7394 gen_helper_mtc0_datahi(cpu_env
, arg
);
7399 goto cp0_unimplemented
;
7405 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7409 goto cp0_unimplemented
;
7416 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7420 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7421 tcg_gen_st_tl(arg
, cpu_env
,
7422 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7426 goto cp0_unimplemented
;
7428 /* Stop translation as we may have switched the execution mode */
7429 ctx
->bstate
= BS_STOP
;
7432 goto cp0_unimplemented
;
7434 (void)rn
; /* avoid a compiler warning */
7435 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7436 /* For simplicity assume that all writes can cause interrupts. */
7437 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7439 ctx
->bstate
= BS_STOP
;
7444 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7446 #endif /* TARGET_MIPS64 */
7448 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7449 int u
, int sel
, int h
)
7451 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7452 TCGv t0
= tcg_temp_local_new();
7454 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7455 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7456 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7457 tcg_gen_movi_tl(t0
, -1);
7458 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7459 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7460 tcg_gen_movi_tl(t0
, -1);
7466 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7469 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7479 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7482 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7485 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7488 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7491 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7494 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7497 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7500 gen_mfc0(ctx
, t0
, rt
, sel
);
7507 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7510 gen_mfc0(ctx
, t0
, rt
, sel
);
7516 gen_helper_mftc0_status(t0
, cpu_env
);
7519 gen_mfc0(ctx
, t0
, rt
, sel
);
7525 gen_helper_mftc0_cause(t0
, cpu_env
);
7535 gen_helper_mftc0_epc(t0
, cpu_env
);
7545 gen_helper_mftc0_ebase(t0
, cpu_env
);
7555 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7565 gen_helper_mftc0_debug(t0
, cpu_env
);
7568 gen_mfc0(ctx
, t0
, rt
, sel
);
7573 gen_mfc0(ctx
, t0
, rt
, sel
);
7575 } else switch (sel
) {
7576 /* GPR registers. */
7578 gen_helper_1e0i(mftgpr
, t0
, rt
);
7580 /* Auxiliary CPU registers */
7584 gen_helper_1e0i(mftlo
, t0
, 0);
7587 gen_helper_1e0i(mfthi
, t0
, 0);
7590 gen_helper_1e0i(mftacx
, t0
, 0);
7593 gen_helper_1e0i(mftlo
, t0
, 1);
7596 gen_helper_1e0i(mfthi
, t0
, 1);
7599 gen_helper_1e0i(mftacx
, t0
, 1);
7602 gen_helper_1e0i(mftlo
, t0
, 2);
7605 gen_helper_1e0i(mfthi
, t0
, 2);
7608 gen_helper_1e0i(mftacx
, t0
, 2);
7611 gen_helper_1e0i(mftlo
, t0
, 3);
7614 gen_helper_1e0i(mfthi
, t0
, 3);
7617 gen_helper_1e0i(mftacx
, t0
, 3);
7620 gen_helper_mftdsp(t0
, cpu_env
);
7626 /* Floating point (COP1). */
7628 /* XXX: For now we support only a single FPU context. */
7630 TCGv_i32 fp0
= tcg_temp_new_i32();
7632 gen_load_fpr32(ctx
, fp0
, rt
);
7633 tcg_gen_ext_i32_tl(t0
, fp0
);
7634 tcg_temp_free_i32(fp0
);
7636 TCGv_i32 fp0
= tcg_temp_new_i32();
7638 gen_load_fpr32h(ctx
, fp0
, rt
);
7639 tcg_gen_ext_i32_tl(t0
, fp0
);
7640 tcg_temp_free_i32(fp0
);
7644 /* XXX: For now we support only a single FPU context. */
7645 gen_helper_1e0i(cfc1
, t0
, rt
);
7647 /* COP2: Not implemented. */
7654 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7655 gen_store_gpr(t0
, rd
);
7661 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7662 generate_exception(ctx
, EXCP_RI
);
7665 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7666 int u
, int sel
, int h
)
7668 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7669 TCGv t0
= tcg_temp_local_new();
7671 gen_load_gpr(t0
, rt
);
7672 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7673 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7674 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7676 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7677 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7684 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7687 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7697 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7700 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7703 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7706 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7709 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7712 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7715 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7718 gen_mtc0(ctx
, t0
, rd
, sel
);
7725 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7728 gen_mtc0(ctx
, t0
, rd
, sel
);
7734 gen_helper_mttc0_status(cpu_env
, t0
);
7737 gen_mtc0(ctx
, t0
, rd
, sel
);
7743 gen_helper_mttc0_cause(cpu_env
, t0
);
7753 gen_helper_mttc0_ebase(cpu_env
, t0
);
7763 gen_helper_mttc0_debug(cpu_env
, t0
);
7766 gen_mtc0(ctx
, t0
, rd
, sel
);
7771 gen_mtc0(ctx
, t0
, rd
, sel
);
7773 } else switch (sel
) {
7774 /* GPR registers. */
7776 gen_helper_0e1i(mttgpr
, t0
, rd
);
7778 /* Auxiliary CPU registers */
7782 gen_helper_0e1i(mttlo
, t0
, 0);
7785 gen_helper_0e1i(mtthi
, t0
, 0);
7788 gen_helper_0e1i(mttacx
, t0
, 0);
7791 gen_helper_0e1i(mttlo
, t0
, 1);
7794 gen_helper_0e1i(mtthi
, t0
, 1);
7797 gen_helper_0e1i(mttacx
, t0
, 1);
7800 gen_helper_0e1i(mttlo
, t0
, 2);
7803 gen_helper_0e1i(mtthi
, t0
, 2);
7806 gen_helper_0e1i(mttacx
, t0
, 2);
7809 gen_helper_0e1i(mttlo
, t0
, 3);
7812 gen_helper_0e1i(mtthi
, t0
, 3);
7815 gen_helper_0e1i(mttacx
, t0
, 3);
7818 gen_helper_mttdsp(cpu_env
, t0
);
7824 /* Floating point (COP1). */
7826 /* XXX: For now we support only a single FPU context. */
7828 TCGv_i32 fp0
= tcg_temp_new_i32();
7830 tcg_gen_trunc_tl_i32(fp0
, t0
);
7831 gen_store_fpr32(ctx
, fp0
, rd
);
7832 tcg_temp_free_i32(fp0
);
7834 TCGv_i32 fp0
= tcg_temp_new_i32();
7836 tcg_gen_trunc_tl_i32(fp0
, t0
);
7837 gen_store_fpr32h(ctx
, fp0
, rd
);
7838 tcg_temp_free_i32(fp0
);
7842 /* XXX: For now we support only a single FPU context. */
7843 save_cpu_state(ctx
, 1);
7845 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7847 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7848 tcg_temp_free_i32(fs_tmp
);
7850 /* Stop translation as we may have changed hflags */
7851 ctx
->bstate
= BS_STOP
;
7853 /* COP2: Not implemented. */
7860 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7866 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7867 generate_exception(ctx
, EXCP_RI
);
7870 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
7872 const char *opn
= "ldst";
7874 check_cp0_enabled(ctx
);
7881 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7886 TCGv t0
= tcg_temp_new();
7888 gen_load_gpr(t0
, rt
);
7889 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7894 #if defined(TARGET_MIPS64)
7896 check_insn(ctx
, ISA_MIPS3
);
7901 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7905 check_insn(ctx
, ISA_MIPS3
);
7907 TCGv t0
= tcg_temp_new();
7909 gen_load_gpr(t0
, rt
);
7910 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7922 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7928 TCGv t0
= tcg_temp_new();
7929 gen_load_gpr(t0
, rt
);
7930 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7936 check_insn(ctx
, ASE_MT
);
7941 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
7942 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7946 check_insn(ctx
, ASE_MT
);
7947 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
7948 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7953 if (!env
->tlb
->helper_tlbwi
)
7955 gen_helper_tlbwi(cpu_env
);
7960 if (!env
->tlb
->helper_tlbinv
) {
7963 gen_helper_tlbinv(cpu_env
);
7964 } /* treat as nop if TLBINV not supported */
7969 if (!env
->tlb
->helper_tlbinvf
) {
7972 gen_helper_tlbinvf(cpu_env
);
7973 } /* treat as nop if TLBINV not supported */
7977 if (!env
->tlb
->helper_tlbwr
)
7979 gen_helper_tlbwr(cpu_env
);
7983 if (!env
->tlb
->helper_tlbp
)
7985 gen_helper_tlbp(cpu_env
);
7989 if (!env
->tlb
->helper_tlbr
)
7991 gen_helper_tlbr(cpu_env
);
7993 case OPC_ERET
: /* OPC_ERETNC */
7994 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
7995 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
7998 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
7999 if (ctx
->opcode
& (1 << bit_shift
)) {
8002 check_insn(ctx
, ISA_MIPS32R5
);
8003 gen_helper_eretnc(cpu_env
);
8007 check_insn(ctx
, ISA_MIPS2
);
8008 gen_helper_eret(cpu_env
);
8010 ctx
->bstate
= BS_EXCP
;
8015 check_insn(ctx
, ISA_MIPS32
);
8016 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8017 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8020 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8022 generate_exception(ctx
, EXCP_RI
);
8024 gen_helper_deret(cpu_env
);
8025 ctx
->bstate
= BS_EXCP
;
8030 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8031 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8032 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8035 /* If we get an exception, we want to restart at next instruction */
8037 save_cpu_state(ctx
, 1);
8039 gen_helper_wait(cpu_env
);
8040 ctx
->bstate
= BS_EXCP
;
8045 generate_exception(ctx
, EXCP_RI
);
8048 (void)opn
; /* avoid a compiler warning */
8050 #endif /* !CONFIG_USER_ONLY */
8052 /* CP1 Branches (before delay slot) */
8053 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8054 int32_t cc
, int32_t offset
)
8056 target_ulong btarget
;
8057 TCGv_i32 t0
= tcg_temp_new_i32();
8059 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8060 generate_exception(ctx
, EXCP_RI
);
8065 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8067 btarget
= ctx
->pc
+ 4 + offset
;
8071 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8072 tcg_gen_not_i32(t0
, t0
);
8073 tcg_gen_andi_i32(t0
, t0
, 1);
8074 tcg_gen_extu_i32_tl(bcond
, t0
);
8077 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8078 tcg_gen_not_i32(t0
, t0
);
8079 tcg_gen_andi_i32(t0
, t0
, 1);
8080 tcg_gen_extu_i32_tl(bcond
, t0
);
8083 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8084 tcg_gen_andi_i32(t0
, t0
, 1);
8085 tcg_gen_extu_i32_tl(bcond
, t0
);
8088 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8089 tcg_gen_andi_i32(t0
, t0
, 1);
8090 tcg_gen_extu_i32_tl(bcond
, t0
);
8092 ctx
->hflags
|= MIPS_HFLAG_BL
;
8096 TCGv_i32 t1
= tcg_temp_new_i32();
8097 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8098 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8099 tcg_gen_nand_i32(t0
, t0
, t1
);
8100 tcg_temp_free_i32(t1
);
8101 tcg_gen_andi_i32(t0
, t0
, 1);
8102 tcg_gen_extu_i32_tl(bcond
, t0
);
8107 TCGv_i32 t1
= tcg_temp_new_i32();
8108 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8109 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8110 tcg_gen_or_i32(t0
, t0
, t1
);
8111 tcg_temp_free_i32(t1
);
8112 tcg_gen_andi_i32(t0
, t0
, 1);
8113 tcg_gen_extu_i32_tl(bcond
, t0
);
8118 TCGv_i32 t1
= tcg_temp_new_i32();
8119 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8120 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8121 tcg_gen_and_i32(t0
, t0
, t1
);
8122 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8123 tcg_gen_and_i32(t0
, t0
, t1
);
8124 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8125 tcg_gen_nand_i32(t0
, t0
, t1
);
8126 tcg_temp_free_i32(t1
);
8127 tcg_gen_andi_i32(t0
, t0
, 1);
8128 tcg_gen_extu_i32_tl(bcond
, t0
);
8133 TCGv_i32 t1
= tcg_temp_new_i32();
8134 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8135 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8136 tcg_gen_or_i32(t0
, t0
, t1
);
8137 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8138 tcg_gen_or_i32(t0
, t0
, t1
);
8139 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8140 tcg_gen_or_i32(t0
, t0
, t1
);
8141 tcg_temp_free_i32(t1
);
8142 tcg_gen_andi_i32(t0
, t0
, 1);
8143 tcg_gen_extu_i32_tl(bcond
, t0
);
8146 ctx
->hflags
|= MIPS_HFLAG_BC
;
8149 MIPS_INVAL("cp1 cond branch");
8150 generate_exception (ctx
, EXCP_RI
);
8153 ctx
->btarget
= btarget
;
8154 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8156 tcg_temp_free_i32(t0
);
8159 /* R6 CP1 Branches */
8160 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8161 int32_t ft
, int32_t offset
,
8164 target_ulong btarget
;
8165 TCGv_i64 t0
= tcg_temp_new_i64();
8167 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8168 #ifdef MIPS_DEBUG_DISAS
8169 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8172 generate_exception(ctx
, EXCP_RI
);
8176 gen_load_fpr64(ctx
, t0
, ft
);
8177 tcg_gen_andi_i64(t0
, t0
, 1);
8179 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8183 tcg_gen_xori_i64(t0
, t0
, 1);
8184 ctx
->hflags
|= MIPS_HFLAG_BC
;
8187 /* t0 already set */
8188 ctx
->hflags
|= MIPS_HFLAG_BC
;
8191 MIPS_INVAL("cp1 cond branch");
8192 generate_exception(ctx
, EXCP_RI
);
8196 tcg_gen_trunc_i64_tl(bcond
, t0
);
8198 ctx
->btarget
= btarget
;
8200 switch (delayslot_size
) {
8202 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8205 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8210 tcg_temp_free_i64(t0
);
8213 /* Coprocessor 1 (FPU) */
8215 #define FOP(func, fmt) (((fmt) << 21) | (func))
8218 OPC_ADD_S
= FOP(0, FMT_S
),
8219 OPC_SUB_S
= FOP(1, FMT_S
),
8220 OPC_MUL_S
= FOP(2, FMT_S
),
8221 OPC_DIV_S
= FOP(3, FMT_S
),
8222 OPC_SQRT_S
= FOP(4, FMT_S
),
8223 OPC_ABS_S
= FOP(5, FMT_S
),
8224 OPC_MOV_S
= FOP(6, FMT_S
),
8225 OPC_NEG_S
= FOP(7, FMT_S
),
8226 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8227 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8228 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8229 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8230 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8231 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8232 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8233 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8234 OPC_SEL_S
= FOP(16, FMT_S
),
8235 OPC_MOVCF_S
= FOP(17, FMT_S
),
8236 OPC_MOVZ_S
= FOP(18, FMT_S
),
8237 OPC_MOVN_S
= FOP(19, FMT_S
),
8238 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8239 OPC_RECIP_S
= FOP(21, FMT_S
),
8240 OPC_RSQRT_S
= FOP(22, FMT_S
),
8241 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8242 OPC_MADDF_S
= FOP(24, FMT_S
),
8243 OPC_MSUBF_S
= FOP(25, FMT_S
),
8244 OPC_RINT_S
= FOP(26, FMT_S
),
8245 OPC_CLASS_S
= FOP(27, FMT_S
),
8246 OPC_MIN_S
= FOP(28, FMT_S
),
8247 OPC_RECIP2_S
= FOP(28, FMT_S
),
8248 OPC_MINA_S
= FOP(29, FMT_S
),
8249 OPC_RECIP1_S
= FOP(29, FMT_S
),
8250 OPC_MAX_S
= FOP(30, FMT_S
),
8251 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8252 OPC_MAXA_S
= FOP(31, FMT_S
),
8253 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8254 OPC_CVT_D_S
= FOP(33, FMT_S
),
8255 OPC_CVT_W_S
= FOP(36, FMT_S
),
8256 OPC_CVT_L_S
= FOP(37, FMT_S
),
8257 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8258 OPC_CMP_F_S
= FOP (48, FMT_S
),
8259 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8260 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8261 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8262 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8263 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8264 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8265 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8266 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8267 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8268 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8269 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8270 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8271 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8272 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8273 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8275 OPC_ADD_D
= FOP(0, FMT_D
),
8276 OPC_SUB_D
= FOP(1, FMT_D
),
8277 OPC_MUL_D
= FOP(2, FMT_D
),
8278 OPC_DIV_D
= FOP(3, FMT_D
),
8279 OPC_SQRT_D
= FOP(4, FMT_D
),
8280 OPC_ABS_D
= FOP(5, FMT_D
),
8281 OPC_MOV_D
= FOP(6, FMT_D
),
8282 OPC_NEG_D
= FOP(7, FMT_D
),
8283 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8284 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8285 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8286 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8287 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8288 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8289 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8290 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8291 OPC_SEL_D
= FOP(16, FMT_D
),
8292 OPC_MOVCF_D
= FOP(17, FMT_D
),
8293 OPC_MOVZ_D
= FOP(18, FMT_D
),
8294 OPC_MOVN_D
= FOP(19, FMT_D
),
8295 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8296 OPC_RECIP_D
= FOP(21, FMT_D
),
8297 OPC_RSQRT_D
= FOP(22, FMT_D
),
8298 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8299 OPC_MADDF_D
= FOP(24, FMT_D
),
8300 OPC_MSUBF_D
= FOP(25, FMT_D
),
8301 OPC_RINT_D
= FOP(26, FMT_D
),
8302 OPC_CLASS_D
= FOP(27, FMT_D
),
8303 OPC_MIN_D
= FOP(28, FMT_D
),
8304 OPC_RECIP2_D
= FOP(28, FMT_D
),
8305 OPC_MINA_D
= FOP(29, FMT_D
),
8306 OPC_RECIP1_D
= FOP(29, FMT_D
),
8307 OPC_MAX_D
= FOP(30, FMT_D
),
8308 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8309 OPC_MAXA_D
= FOP(31, FMT_D
),
8310 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8311 OPC_CVT_S_D
= FOP(32, FMT_D
),
8312 OPC_CVT_W_D
= FOP(36, FMT_D
),
8313 OPC_CVT_L_D
= FOP(37, FMT_D
),
8314 OPC_CMP_F_D
= FOP (48, FMT_D
),
8315 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8316 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8317 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8318 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8319 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8320 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8321 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8322 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8323 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8324 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8325 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8326 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8327 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8328 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8329 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8331 OPC_CVT_S_W
= FOP(32, FMT_W
),
8332 OPC_CVT_D_W
= FOP(33, FMT_W
),
8333 OPC_CVT_S_L
= FOP(32, FMT_L
),
8334 OPC_CVT_D_L
= FOP(33, FMT_L
),
8335 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8337 OPC_ADD_PS
= FOP(0, FMT_PS
),
8338 OPC_SUB_PS
= FOP(1, FMT_PS
),
8339 OPC_MUL_PS
= FOP(2, FMT_PS
),
8340 OPC_DIV_PS
= FOP(3, FMT_PS
),
8341 OPC_ABS_PS
= FOP(5, FMT_PS
),
8342 OPC_MOV_PS
= FOP(6, FMT_PS
),
8343 OPC_NEG_PS
= FOP(7, FMT_PS
),
8344 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8345 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8346 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8347 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8348 OPC_MULR_PS
= FOP(26, FMT_PS
),
8349 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8350 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8351 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8352 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8354 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8355 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8356 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8357 OPC_PLL_PS
= FOP(44, FMT_PS
),
8358 OPC_PLU_PS
= FOP(45, FMT_PS
),
8359 OPC_PUL_PS
= FOP(46, FMT_PS
),
8360 OPC_PUU_PS
= FOP(47, FMT_PS
),
8361 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8362 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8363 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8364 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8365 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8366 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8367 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8368 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8369 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8370 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8371 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8372 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8373 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8374 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8375 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8376 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8380 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8381 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8382 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8383 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8384 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8385 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8386 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8387 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8388 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8389 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8390 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8391 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8392 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8393 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8394 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8395 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8396 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8397 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8398 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8399 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8400 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8401 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8403 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8404 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8405 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8406 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8407 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8408 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8409 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8410 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8411 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8412 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8413 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8414 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8415 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8416 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8417 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8418 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8419 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8420 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8421 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8422 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8423 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8424 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8426 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8428 TCGv t0
= tcg_temp_new();
8433 TCGv_i32 fp0
= tcg_temp_new_i32();
8435 gen_load_fpr32(ctx
, fp0
, fs
);
8436 tcg_gen_ext_i32_tl(t0
, fp0
);
8437 tcg_temp_free_i32(fp0
);
8439 gen_store_gpr(t0
, rt
);
8442 gen_load_gpr(t0
, rt
);
8444 TCGv_i32 fp0
= tcg_temp_new_i32();
8446 tcg_gen_trunc_tl_i32(fp0
, t0
);
8447 gen_store_fpr32(ctx
, fp0
, fs
);
8448 tcg_temp_free_i32(fp0
);
8452 gen_helper_1e0i(cfc1
, t0
, fs
);
8453 gen_store_gpr(t0
, rt
);
8456 gen_load_gpr(t0
, rt
);
8457 save_cpu_state(ctx
, 1);
8459 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8461 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8462 tcg_temp_free_i32(fs_tmp
);
8464 /* Stop translation as we may have changed hflags */
8465 ctx
->bstate
= BS_STOP
;
8467 #if defined(TARGET_MIPS64)
8469 gen_load_fpr64(ctx
, t0
, fs
);
8470 gen_store_gpr(t0
, rt
);
8473 gen_load_gpr(t0
, rt
);
8474 gen_store_fpr64(ctx
, t0
, fs
);
8479 TCGv_i32 fp0
= tcg_temp_new_i32();
8481 gen_load_fpr32h(ctx
, fp0
, fs
);
8482 tcg_gen_ext_i32_tl(t0
, fp0
);
8483 tcg_temp_free_i32(fp0
);
8485 gen_store_gpr(t0
, rt
);
8488 gen_load_gpr(t0
, rt
);
8490 TCGv_i32 fp0
= tcg_temp_new_i32();
8492 tcg_gen_trunc_tl_i32(fp0
, t0
);
8493 gen_store_fpr32h(ctx
, fp0
, fs
);
8494 tcg_temp_free_i32(fp0
);
8498 MIPS_INVAL("cp1 move");
8499 generate_exception (ctx
, EXCP_RI
);
8507 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8523 l1
= gen_new_label();
8524 t0
= tcg_temp_new_i32();
8525 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8526 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8527 tcg_temp_free_i32(t0
);
8529 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8531 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8536 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8540 TCGv_i32 t0
= tcg_temp_new_i32();
8541 TCGLabel
*l1
= gen_new_label();
8548 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8549 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8550 gen_load_fpr32(ctx
, t0
, fs
);
8551 gen_store_fpr32(ctx
, t0
, fd
);
8553 tcg_temp_free_i32(t0
);
8556 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8559 TCGv_i32 t0
= tcg_temp_new_i32();
8561 TCGLabel
*l1
= gen_new_label();
8568 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8569 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8570 tcg_temp_free_i32(t0
);
8571 fp0
= tcg_temp_new_i64();
8572 gen_load_fpr64(ctx
, fp0
, fs
);
8573 gen_store_fpr64(ctx
, fp0
, fd
);
8574 tcg_temp_free_i64(fp0
);
8578 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8582 TCGv_i32 t0
= tcg_temp_new_i32();
8583 TCGLabel
*l1
= gen_new_label();
8584 TCGLabel
*l2
= gen_new_label();
8591 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8592 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8593 gen_load_fpr32(ctx
, t0
, fs
);
8594 gen_store_fpr32(ctx
, t0
, fd
);
8597 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8598 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8599 gen_load_fpr32h(ctx
, t0
, fs
);
8600 gen_store_fpr32h(ctx
, t0
, fd
);
8601 tcg_temp_free_i32(t0
);
8605 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8608 TCGv_i32 t1
= tcg_const_i32(0);
8609 TCGv_i32 fp0
= tcg_temp_new_i32();
8610 TCGv_i32 fp1
= tcg_temp_new_i32();
8611 TCGv_i32 fp2
= tcg_temp_new_i32();
8612 gen_load_fpr32(ctx
, fp0
, fd
);
8613 gen_load_fpr32(ctx
, fp1
, ft
);
8614 gen_load_fpr32(ctx
, fp2
, fs
);
8618 tcg_gen_andi_i32(fp0
, fp0
, 1);
8619 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8622 tcg_gen_andi_i32(fp1
, fp1
, 1);
8623 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8626 tcg_gen_andi_i32(fp1
, fp1
, 1);
8627 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8630 MIPS_INVAL("gen_sel_s");
8631 generate_exception (ctx
, EXCP_RI
);
8635 gen_store_fpr32(ctx
, fp0
, fd
);
8636 tcg_temp_free_i32(fp2
);
8637 tcg_temp_free_i32(fp1
);
8638 tcg_temp_free_i32(fp0
);
8639 tcg_temp_free_i32(t1
);
8642 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8645 TCGv_i64 t1
= tcg_const_i64(0);
8646 TCGv_i64 fp0
= tcg_temp_new_i64();
8647 TCGv_i64 fp1
= tcg_temp_new_i64();
8648 TCGv_i64 fp2
= tcg_temp_new_i64();
8649 gen_load_fpr64(ctx
, fp0
, fd
);
8650 gen_load_fpr64(ctx
, fp1
, ft
);
8651 gen_load_fpr64(ctx
, fp2
, fs
);
8655 tcg_gen_andi_i64(fp0
, fp0
, 1);
8656 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8659 tcg_gen_andi_i64(fp1
, fp1
, 1);
8660 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8663 tcg_gen_andi_i64(fp1
, fp1
, 1);
8664 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8667 MIPS_INVAL("gen_sel_d");
8668 generate_exception (ctx
, EXCP_RI
);
8672 gen_store_fpr64(ctx
, fp0
, fd
);
8673 tcg_temp_free_i64(fp2
);
8674 tcg_temp_free_i64(fp1
);
8675 tcg_temp_free_i64(fp0
);
8676 tcg_temp_free_i64(t1
);
8679 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8680 int ft
, int fs
, int fd
, int cc
)
8682 uint32_t func
= ctx
->opcode
& 0x3f;
8686 TCGv_i32 fp0
= tcg_temp_new_i32();
8687 TCGv_i32 fp1
= tcg_temp_new_i32();
8689 gen_load_fpr32(ctx
, fp0
, fs
);
8690 gen_load_fpr32(ctx
, fp1
, ft
);
8691 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8692 tcg_temp_free_i32(fp1
);
8693 gen_store_fpr32(ctx
, fp0
, fd
);
8694 tcg_temp_free_i32(fp0
);
8699 TCGv_i32 fp0
= tcg_temp_new_i32();
8700 TCGv_i32 fp1
= tcg_temp_new_i32();
8702 gen_load_fpr32(ctx
, fp0
, fs
);
8703 gen_load_fpr32(ctx
, fp1
, ft
);
8704 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8705 tcg_temp_free_i32(fp1
);
8706 gen_store_fpr32(ctx
, fp0
, fd
);
8707 tcg_temp_free_i32(fp0
);
8712 TCGv_i32 fp0
= tcg_temp_new_i32();
8713 TCGv_i32 fp1
= tcg_temp_new_i32();
8715 gen_load_fpr32(ctx
, fp0
, fs
);
8716 gen_load_fpr32(ctx
, fp1
, ft
);
8717 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8718 tcg_temp_free_i32(fp1
);
8719 gen_store_fpr32(ctx
, fp0
, fd
);
8720 tcg_temp_free_i32(fp0
);
8725 TCGv_i32 fp0
= tcg_temp_new_i32();
8726 TCGv_i32 fp1
= tcg_temp_new_i32();
8728 gen_load_fpr32(ctx
, fp0
, fs
);
8729 gen_load_fpr32(ctx
, fp1
, ft
);
8730 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8731 tcg_temp_free_i32(fp1
);
8732 gen_store_fpr32(ctx
, fp0
, fd
);
8733 tcg_temp_free_i32(fp0
);
8738 TCGv_i32 fp0
= tcg_temp_new_i32();
8740 gen_load_fpr32(ctx
, fp0
, fs
);
8741 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8742 gen_store_fpr32(ctx
, fp0
, fd
);
8743 tcg_temp_free_i32(fp0
);
8748 TCGv_i32 fp0
= tcg_temp_new_i32();
8750 gen_load_fpr32(ctx
, fp0
, fs
);
8751 gen_helper_float_abs_s(fp0
, fp0
);
8752 gen_store_fpr32(ctx
, fp0
, fd
);
8753 tcg_temp_free_i32(fp0
);
8758 TCGv_i32 fp0
= tcg_temp_new_i32();
8760 gen_load_fpr32(ctx
, fp0
, fs
);
8761 gen_store_fpr32(ctx
, fp0
, fd
);
8762 tcg_temp_free_i32(fp0
);
8767 TCGv_i32 fp0
= tcg_temp_new_i32();
8769 gen_load_fpr32(ctx
, fp0
, fs
);
8770 gen_helper_float_chs_s(fp0
, fp0
);
8771 gen_store_fpr32(ctx
, fp0
, fd
);
8772 tcg_temp_free_i32(fp0
);
8776 check_cp1_64bitmode(ctx
);
8778 TCGv_i32 fp32
= tcg_temp_new_i32();
8779 TCGv_i64 fp64
= tcg_temp_new_i64();
8781 gen_load_fpr32(ctx
, fp32
, fs
);
8782 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8783 tcg_temp_free_i32(fp32
);
8784 gen_store_fpr64(ctx
, fp64
, fd
);
8785 tcg_temp_free_i64(fp64
);
8789 check_cp1_64bitmode(ctx
);
8791 TCGv_i32 fp32
= tcg_temp_new_i32();
8792 TCGv_i64 fp64
= tcg_temp_new_i64();
8794 gen_load_fpr32(ctx
, fp32
, fs
);
8795 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8796 tcg_temp_free_i32(fp32
);
8797 gen_store_fpr64(ctx
, fp64
, fd
);
8798 tcg_temp_free_i64(fp64
);
8802 check_cp1_64bitmode(ctx
);
8804 TCGv_i32 fp32
= tcg_temp_new_i32();
8805 TCGv_i64 fp64
= tcg_temp_new_i64();
8807 gen_load_fpr32(ctx
, fp32
, fs
);
8808 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8809 tcg_temp_free_i32(fp32
);
8810 gen_store_fpr64(ctx
, fp64
, fd
);
8811 tcg_temp_free_i64(fp64
);
8815 check_cp1_64bitmode(ctx
);
8817 TCGv_i32 fp32
= tcg_temp_new_i32();
8818 TCGv_i64 fp64
= tcg_temp_new_i64();
8820 gen_load_fpr32(ctx
, fp32
, fs
);
8821 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8822 tcg_temp_free_i32(fp32
);
8823 gen_store_fpr64(ctx
, fp64
, fd
);
8824 tcg_temp_free_i64(fp64
);
8829 TCGv_i32 fp0
= tcg_temp_new_i32();
8831 gen_load_fpr32(ctx
, fp0
, fs
);
8832 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8833 gen_store_fpr32(ctx
, fp0
, fd
);
8834 tcg_temp_free_i32(fp0
);
8839 TCGv_i32 fp0
= tcg_temp_new_i32();
8841 gen_load_fpr32(ctx
, fp0
, fs
);
8842 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8843 gen_store_fpr32(ctx
, fp0
, fd
);
8844 tcg_temp_free_i32(fp0
);
8849 TCGv_i32 fp0
= tcg_temp_new_i32();
8851 gen_load_fpr32(ctx
, fp0
, fs
);
8852 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8853 gen_store_fpr32(ctx
, fp0
, fd
);
8854 tcg_temp_free_i32(fp0
);
8859 TCGv_i32 fp0
= tcg_temp_new_i32();
8861 gen_load_fpr32(ctx
, fp0
, fs
);
8862 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8863 gen_store_fpr32(ctx
, fp0
, fd
);
8864 tcg_temp_free_i32(fp0
);
8868 check_insn(ctx
, ISA_MIPS32R6
);
8869 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8872 check_insn(ctx
, ISA_MIPS32R6
);
8873 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8876 check_insn(ctx
, ISA_MIPS32R6
);
8877 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8880 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8881 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8884 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8886 TCGLabel
*l1
= gen_new_label();
8890 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8892 fp0
= tcg_temp_new_i32();
8893 gen_load_fpr32(ctx
, fp0
, fs
);
8894 gen_store_fpr32(ctx
, fp0
, fd
);
8895 tcg_temp_free_i32(fp0
);
8900 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8902 TCGLabel
*l1
= gen_new_label();
8906 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8907 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_store_fpr32(ctx
, fp0
, fd
);
8910 tcg_temp_free_i32(fp0
);
8917 TCGv_i32 fp0
= tcg_temp_new_i32();
8919 gen_load_fpr32(ctx
, fp0
, fs
);
8920 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8921 gen_store_fpr32(ctx
, fp0
, fd
);
8922 tcg_temp_free_i32(fp0
);
8927 TCGv_i32 fp0
= tcg_temp_new_i32();
8929 gen_load_fpr32(ctx
, fp0
, fs
);
8930 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8931 gen_store_fpr32(ctx
, fp0
, fd
);
8932 tcg_temp_free_i32(fp0
);
8936 check_insn(ctx
, ISA_MIPS32R6
);
8938 TCGv_i32 fp0
= tcg_temp_new_i32();
8939 TCGv_i32 fp1
= tcg_temp_new_i32();
8940 TCGv_i32 fp2
= tcg_temp_new_i32();
8941 gen_load_fpr32(ctx
, fp0
, fs
);
8942 gen_load_fpr32(ctx
, fp1
, ft
);
8943 gen_load_fpr32(ctx
, fp2
, fd
);
8944 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8945 gen_store_fpr32(ctx
, fp2
, fd
);
8946 tcg_temp_free_i32(fp2
);
8947 tcg_temp_free_i32(fp1
);
8948 tcg_temp_free_i32(fp0
);
8952 check_insn(ctx
, ISA_MIPS32R6
);
8954 TCGv_i32 fp0
= tcg_temp_new_i32();
8955 TCGv_i32 fp1
= tcg_temp_new_i32();
8956 TCGv_i32 fp2
= tcg_temp_new_i32();
8957 gen_load_fpr32(ctx
, fp0
, fs
);
8958 gen_load_fpr32(ctx
, fp1
, ft
);
8959 gen_load_fpr32(ctx
, fp2
, fd
);
8960 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8961 gen_store_fpr32(ctx
, fp2
, fd
);
8962 tcg_temp_free_i32(fp2
);
8963 tcg_temp_free_i32(fp1
);
8964 tcg_temp_free_i32(fp0
);
8968 check_insn(ctx
, ISA_MIPS32R6
);
8970 TCGv_i32 fp0
= tcg_temp_new_i32();
8971 gen_load_fpr32(ctx
, fp0
, fs
);
8972 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
8973 gen_store_fpr32(ctx
, fp0
, fd
);
8974 tcg_temp_free_i32(fp0
);
8978 check_insn(ctx
, ISA_MIPS32R6
);
8980 TCGv_i32 fp0
= tcg_temp_new_i32();
8981 gen_load_fpr32(ctx
, fp0
, fs
);
8982 gen_helper_float_class_s(fp0
, fp0
);
8983 gen_store_fpr32(ctx
, fp0
, fd
);
8984 tcg_temp_free_i32(fp0
);
8987 case OPC_MIN_S
: /* OPC_RECIP2_S */
8988 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
8990 TCGv_i32 fp0
= tcg_temp_new_i32();
8991 TCGv_i32 fp1
= tcg_temp_new_i32();
8992 TCGv_i32 fp2
= tcg_temp_new_i32();
8993 gen_load_fpr32(ctx
, fp0
, fs
);
8994 gen_load_fpr32(ctx
, fp1
, ft
);
8995 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
8996 gen_store_fpr32(ctx
, fp2
, fd
);
8997 tcg_temp_free_i32(fp2
);
8998 tcg_temp_free_i32(fp1
);
8999 tcg_temp_free_i32(fp0
);
9002 check_cp1_64bitmode(ctx
);
9004 TCGv_i32 fp0
= tcg_temp_new_i32();
9005 TCGv_i32 fp1
= tcg_temp_new_i32();
9007 gen_load_fpr32(ctx
, fp0
, fs
);
9008 gen_load_fpr32(ctx
, fp1
, ft
);
9009 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9010 tcg_temp_free_i32(fp1
);
9011 gen_store_fpr32(ctx
, fp0
, fd
);
9012 tcg_temp_free_i32(fp0
);
9016 case OPC_MINA_S
: /* OPC_RECIP1_S */
9017 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9019 TCGv_i32 fp0
= tcg_temp_new_i32();
9020 TCGv_i32 fp1
= tcg_temp_new_i32();
9021 TCGv_i32 fp2
= tcg_temp_new_i32();
9022 gen_load_fpr32(ctx
, fp0
, fs
);
9023 gen_load_fpr32(ctx
, fp1
, ft
);
9024 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9025 gen_store_fpr32(ctx
, fp2
, fd
);
9026 tcg_temp_free_i32(fp2
);
9027 tcg_temp_free_i32(fp1
);
9028 tcg_temp_free_i32(fp0
);
9031 check_cp1_64bitmode(ctx
);
9033 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 gen_load_fpr32(ctx
, fp0
, fs
);
9036 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9037 gen_store_fpr32(ctx
, fp0
, fd
);
9038 tcg_temp_free_i32(fp0
);
9042 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9043 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9045 TCGv_i32 fp0
= tcg_temp_new_i32();
9046 TCGv_i32 fp1
= tcg_temp_new_i32();
9047 gen_load_fpr32(ctx
, fp0
, fs
);
9048 gen_load_fpr32(ctx
, fp1
, ft
);
9049 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9050 gen_store_fpr32(ctx
, fp1
, fd
);
9051 tcg_temp_free_i32(fp1
);
9052 tcg_temp_free_i32(fp0
);
9055 check_cp1_64bitmode(ctx
);
9057 TCGv_i32 fp0
= tcg_temp_new_i32();
9059 gen_load_fpr32(ctx
, fp0
, fs
);
9060 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9061 gen_store_fpr32(ctx
, fp0
, fd
);
9062 tcg_temp_free_i32(fp0
);
9066 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9067 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9069 TCGv_i32 fp0
= tcg_temp_new_i32();
9070 TCGv_i32 fp1
= tcg_temp_new_i32();
9071 gen_load_fpr32(ctx
, fp0
, fs
);
9072 gen_load_fpr32(ctx
, fp1
, ft
);
9073 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9074 gen_store_fpr32(ctx
, fp1
, fd
);
9075 tcg_temp_free_i32(fp1
);
9076 tcg_temp_free_i32(fp0
);
9079 check_cp1_64bitmode(ctx
);
9081 TCGv_i32 fp0
= tcg_temp_new_i32();
9082 TCGv_i32 fp1
= tcg_temp_new_i32();
9084 gen_load_fpr32(ctx
, fp0
, fs
);
9085 gen_load_fpr32(ctx
, fp1
, ft
);
9086 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9087 tcg_temp_free_i32(fp1
);
9088 gen_store_fpr32(ctx
, fp0
, fd
);
9089 tcg_temp_free_i32(fp0
);
9094 check_cp1_registers(ctx
, fd
);
9096 TCGv_i32 fp32
= tcg_temp_new_i32();
9097 TCGv_i64 fp64
= tcg_temp_new_i64();
9099 gen_load_fpr32(ctx
, fp32
, fs
);
9100 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9101 tcg_temp_free_i32(fp32
);
9102 gen_store_fpr64(ctx
, fp64
, fd
);
9103 tcg_temp_free_i64(fp64
);
9108 TCGv_i32 fp0
= tcg_temp_new_i32();
9110 gen_load_fpr32(ctx
, fp0
, fs
);
9111 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9112 gen_store_fpr32(ctx
, fp0
, fd
);
9113 tcg_temp_free_i32(fp0
);
9117 check_cp1_64bitmode(ctx
);
9119 TCGv_i32 fp32
= tcg_temp_new_i32();
9120 TCGv_i64 fp64
= tcg_temp_new_i64();
9122 gen_load_fpr32(ctx
, fp32
, fs
);
9123 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9124 tcg_temp_free_i32(fp32
);
9125 gen_store_fpr64(ctx
, fp64
, fd
);
9126 tcg_temp_free_i64(fp64
);
9132 TCGv_i64 fp64
= tcg_temp_new_i64();
9133 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9134 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9136 gen_load_fpr32(ctx
, fp32_0
, fs
);
9137 gen_load_fpr32(ctx
, fp32_1
, ft
);
9138 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9139 tcg_temp_free_i32(fp32_1
);
9140 tcg_temp_free_i32(fp32_0
);
9141 gen_store_fpr64(ctx
, fp64
, fd
);
9142 tcg_temp_free_i64(fp64
);
9154 case OPC_CMP_NGLE_S
:
9161 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9162 if (ctx
->opcode
& (1 << 6)) {
9163 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9165 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9169 check_cp1_registers(ctx
, fs
| ft
| fd
);
9171 TCGv_i64 fp0
= tcg_temp_new_i64();
9172 TCGv_i64 fp1
= tcg_temp_new_i64();
9174 gen_load_fpr64(ctx
, fp0
, fs
);
9175 gen_load_fpr64(ctx
, fp1
, ft
);
9176 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9177 tcg_temp_free_i64(fp1
);
9178 gen_store_fpr64(ctx
, fp0
, fd
);
9179 tcg_temp_free_i64(fp0
);
9183 check_cp1_registers(ctx
, fs
| ft
| fd
);
9185 TCGv_i64 fp0
= tcg_temp_new_i64();
9186 TCGv_i64 fp1
= tcg_temp_new_i64();
9188 gen_load_fpr64(ctx
, fp0
, fs
);
9189 gen_load_fpr64(ctx
, fp1
, ft
);
9190 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9191 tcg_temp_free_i64(fp1
);
9192 gen_store_fpr64(ctx
, fp0
, fd
);
9193 tcg_temp_free_i64(fp0
);
9197 check_cp1_registers(ctx
, fs
| ft
| fd
);
9199 TCGv_i64 fp0
= tcg_temp_new_i64();
9200 TCGv_i64 fp1
= tcg_temp_new_i64();
9202 gen_load_fpr64(ctx
, fp0
, fs
);
9203 gen_load_fpr64(ctx
, fp1
, ft
);
9204 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9205 tcg_temp_free_i64(fp1
);
9206 gen_store_fpr64(ctx
, fp0
, fd
);
9207 tcg_temp_free_i64(fp0
);
9211 check_cp1_registers(ctx
, fs
| ft
| fd
);
9213 TCGv_i64 fp0
= tcg_temp_new_i64();
9214 TCGv_i64 fp1
= tcg_temp_new_i64();
9216 gen_load_fpr64(ctx
, fp0
, fs
);
9217 gen_load_fpr64(ctx
, fp1
, ft
);
9218 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9219 tcg_temp_free_i64(fp1
);
9220 gen_store_fpr64(ctx
, fp0
, fd
);
9221 tcg_temp_free_i64(fp0
);
9225 check_cp1_registers(ctx
, fs
| fd
);
9227 TCGv_i64 fp0
= tcg_temp_new_i64();
9229 gen_load_fpr64(ctx
, fp0
, fs
);
9230 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9231 gen_store_fpr64(ctx
, fp0
, fd
);
9232 tcg_temp_free_i64(fp0
);
9236 check_cp1_registers(ctx
, fs
| fd
);
9238 TCGv_i64 fp0
= tcg_temp_new_i64();
9240 gen_load_fpr64(ctx
, fp0
, fs
);
9241 gen_helper_float_abs_d(fp0
, fp0
);
9242 gen_store_fpr64(ctx
, fp0
, fd
);
9243 tcg_temp_free_i64(fp0
);
9247 check_cp1_registers(ctx
, fs
| fd
);
9249 TCGv_i64 fp0
= tcg_temp_new_i64();
9251 gen_load_fpr64(ctx
, fp0
, fs
);
9252 gen_store_fpr64(ctx
, fp0
, fd
);
9253 tcg_temp_free_i64(fp0
);
9257 check_cp1_registers(ctx
, fs
| fd
);
9259 TCGv_i64 fp0
= tcg_temp_new_i64();
9261 gen_load_fpr64(ctx
, fp0
, fs
);
9262 gen_helper_float_chs_d(fp0
, fp0
);
9263 gen_store_fpr64(ctx
, fp0
, fd
);
9264 tcg_temp_free_i64(fp0
);
9268 check_cp1_64bitmode(ctx
);
9270 TCGv_i64 fp0
= tcg_temp_new_i64();
9272 gen_load_fpr64(ctx
, fp0
, fs
);
9273 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9274 gen_store_fpr64(ctx
, fp0
, fd
);
9275 tcg_temp_free_i64(fp0
);
9279 check_cp1_64bitmode(ctx
);
9281 TCGv_i64 fp0
= tcg_temp_new_i64();
9283 gen_load_fpr64(ctx
, fp0
, fs
);
9284 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9285 gen_store_fpr64(ctx
, fp0
, fd
);
9286 tcg_temp_free_i64(fp0
);
9290 check_cp1_64bitmode(ctx
);
9292 TCGv_i64 fp0
= tcg_temp_new_i64();
9294 gen_load_fpr64(ctx
, fp0
, fs
);
9295 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9296 gen_store_fpr64(ctx
, fp0
, fd
);
9297 tcg_temp_free_i64(fp0
);
9301 check_cp1_64bitmode(ctx
);
9303 TCGv_i64 fp0
= tcg_temp_new_i64();
9305 gen_load_fpr64(ctx
, fp0
, fs
);
9306 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9307 gen_store_fpr64(ctx
, fp0
, fd
);
9308 tcg_temp_free_i64(fp0
);
9312 check_cp1_registers(ctx
, fs
);
9314 TCGv_i32 fp32
= tcg_temp_new_i32();
9315 TCGv_i64 fp64
= tcg_temp_new_i64();
9317 gen_load_fpr64(ctx
, fp64
, fs
);
9318 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9319 tcg_temp_free_i64(fp64
);
9320 gen_store_fpr32(ctx
, fp32
, fd
);
9321 tcg_temp_free_i32(fp32
);
9325 check_cp1_registers(ctx
, fs
);
9327 TCGv_i32 fp32
= tcg_temp_new_i32();
9328 TCGv_i64 fp64
= tcg_temp_new_i64();
9330 gen_load_fpr64(ctx
, fp64
, fs
);
9331 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9332 tcg_temp_free_i64(fp64
);
9333 gen_store_fpr32(ctx
, fp32
, fd
);
9334 tcg_temp_free_i32(fp32
);
9338 check_cp1_registers(ctx
, fs
);
9340 TCGv_i32 fp32
= tcg_temp_new_i32();
9341 TCGv_i64 fp64
= tcg_temp_new_i64();
9343 gen_load_fpr64(ctx
, fp64
, fs
);
9344 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9345 tcg_temp_free_i64(fp64
);
9346 gen_store_fpr32(ctx
, fp32
, fd
);
9347 tcg_temp_free_i32(fp32
);
9351 check_cp1_registers(ctx
, fs
);
9353 TCGv_i32 fp32
= tcg_temp_new_i32();
9354 TCGv_i64 fp64
= tcg_temp_new_i64();
9356 gen_load_fpr64(ctx
, fp64
, fs
);
9357 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9358 tcg_temp_free_i64(fp64
);
9359 gen_store_fpr32(ctx
, fp32
, fd
);
9360 tcg_temp_free_i32(fp32
);
9364 check_insn(ctx
, ISA_MIPS32R6
);
9365 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9368 check_insn(ctx
, ISA_MIPS32R6
);
9369 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9372 check_insn(ctx
, ISA_MIPS32R6
);
9373 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9376 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9377 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9380 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9382 TCGLabel
*l1
= gen_new_label();
9386 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9388 fp0
= tcg_temp_new_i64();
9389 gen_load_fpr64(ctx
, fp0
, fs
);
9390 gen_store_fpr64(ctx
, fp0
, fd
);
9391 tcg_temp_free_i64(fp0
);
9396 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9398 TCGLabel
*l1
= gen_new_label();
9402 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9403 fp0
= tcg_temp_new_i64();
9404 gen_load_fpr64(ctx
, fp0
, fs
);
9405 gen_store_fpr64(ctx
, fp0
, fd
);
9406 tcg_temp_free_i64(fp0
);
9412 check_cp1_registers(ctx
, fs
| fd
);
9414 TCGv_i64 fp0
= tcg_temp_new_i64();
9416 gen_load_fpr64(ctx
, fp0
, fs
);
9417 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9418 gen_store_fpr64(ctx
, fp0
, fd
);
9419 tcg_temp_free_i64(fp0
);
9423 check_cp1_registers(ctx
, fs
| fd
);
9425 TCGv_i64 fp0
= tcg_temp_new_i64();
9427 gen_load_fpr64(ctx
, fp0
, fs
);
9428 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9429 gen_store_fpr64(ctx
, fp0
, fd
);
9430 tcg_temp_free_i64(fp0
);
9434 check_insn(ctx
, ISA_MIPS32R6
);
9436 TCGv_i64 fp0
= tcg_temp_new_i64();
9437 TCGv_i64 fp1
= tcg_temp_new_i64();
9438 TCGv_i64 fp2
= tcg_temp_new_i64();
9439 gen_load_fpr64(ctx
, fp0
, fs
);
9440 gen_load_fpr64(ctx
, fp1
, ft
);
9441 gen_load_fpr64(ctx
, fp2
, fd
);
9442 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9443 gen_store_fpr64(ctx
, fp2
, fd
);
9444 tcg_temp_free_i64(fp2
);
9445 tcg_temp_free_i64(fp1
);
9446 tcg_temp_free_i64(fp0
);
9450 check_insn(ctx
, ISA_MIPS32R6
);
9452 TCGv_i64 fp0
= tcg_temp_new_i64();
9453 TCGv_i64 fp1
= tcg_temp_new_i64();
9454 TCGv_i64 fp2
= tcg_temp_new_i64();
9455 gen_load_fpr64(ctx
, fp0
, fs
);
9456 gen_load_fpr64(ctx
, fp1
, ft
);
9457 gen_load_fpr64(ctx
, fp2
, fd
);
9458 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9459 gen_store_fpr64(ctx
, fp2
, fd
);
9460 tcg_temp_free_i64(fp2
);
9461 tcg_temp_free_i64(fp1
);
9462 tcg_temp_free_i64(fp0
);
9466 check_insn(ctx
, ISA_MIPS32R6
);
9468 TCGv_i64 fp0
= tcg_temp_new_i64();
9469 gen_load_fpr64(ctx
, fp0
, fs
);
9470 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9471 gen_store_fpr64(ctx
, fp0
, fd
);
9472 tcg_temp_free_i64(fp0
);
9476 check_insn(ctx
, ISA_MIPS32R6
);
9478 TCGv_i64 fp0
= tcg_temp_new_i64();
9479 gen_load_fpr64(ctx
, fp0
, fs
);
9480 gen_helper_float_class_d(fp0
, fp0
);
9481 gen_store_fpr64(ctx
, fp0
, fd
);
9482 tcg_temp_free_i64(fp0
);
9485 case OPC_MIN_D
: /* OPC_RECIP2_D */
9486 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9488 TCGv_i64 fp0
= tcg_temp_new_i64();
9489 TCGv_i64 fp1
= tcg_temp_new_i64();
9490 gen_load_fpr64(ctx
, fp0
, fs
);
9491 gen_load_fpr64(ctx
, fp1
, ft
);
9492 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9493 gen_store_fpr64(ctx
, fp1
, fd
);
9494 tcg_temp_free_i64(fp1
);
9495 tcg_temp_free_i64(fp0
);
9498 check_cp1_64bitmode(ctx
);
9500 TCGv_i64 fp0
= tcg_temp_new_i64();
9501 TCGv_i64 fp1
= tcg_temp_new_i64();
9503 gen_load_fpr64(ctx
, fp0
, fs
);
9504 gen_load_fpr64(ctx
, fp1
, ft
);
9505 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9506 tcg_temp_free_i64(fp1
);
9507 gen_store_fpr64(ctx
, fp0
, fd
);
9508 tcg_temp_free_i64(fp0
);
9512 case OPC_MINA_D
: /* OPC_RECIP1_D */
9513 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9515 TCGv_i64 fp0
= tcg_temp_new_i64();
9516 TCGv_i64 fp1
= tcg_temp_new_i64();
9517 gen_load_fpr64(ctx
, fp0
, fs
);
9518 gen_load_fpr64(ctx
, fp1
, ft
);
9519 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9520 gen_store_fpr64(ctx
, fp1
, fd
);
9521 tcg_temp_free_i64(fp1
);
9522 tcg_temp_free_i64(fp0
);
9525 check_cp1_64bitmode(ctx
);
9527 TCGv_i64 fp0
= tcg_temp_new_i64();
9529 gen_load_fpr64(ctx
, fp0
, fs
);
9530 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9531 gen_store_fpr64(ctx
, fp0
, fd
);
9532 tcg_temp_free_i64(fp0
);
9536 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9537 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9539 TCGv_i64 fp0
= tcg_temp_new_i64();
9540 TCGv_i64 fp1
= tcg_temp_new_i64();
9541 gen_load_fpr64(ctx
, fp0
, fs
);
9542 gen_load_fpr64(ctx
, fp1
, ft
);
9543 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9544 gen_store_fpr64(ctx
, fp1
, fd
);
9545 tcg_temp_free_i64(fp1
);
9546 tcg_temp_free_i64(fp0
);
9549 check_cp1_64bitmode(ctx
);
9551 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 gen_load_fpr64(ctx
, fp0
, fs
);
9554 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9555 gen_store_fpr64(ctx
, fp0
, fd
);
9556 tcg_temp_free_i64(fp0
);
9560 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9561 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9563 TCGv_i64 fp0
= tcg_temp_new_i64();
9564 TCGv_i64 fp1
= tcg_temp_new_i64();
9565 gen_load_fpr64(ctx
, fp0
, fs
);
9566 gen_load_fpr64(ctx
, fp1
, ft
);
9567 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9568 gen_store_fpr64(ctx
, fp1
, fd
);
9569 tcg_temp_free_i64(fp1
);
9570 tcg_temp_free_i64(fp0
);
9573 check_cp1_64bitmode(ctx
);
9575 TCGv_i64 fp0
= tcg_temp_new_i64();
9576 TCGv_i64 fp1
= tcg_temp_new_i64();
9578 gen_load_fpr64(ctx
, fp0
, fs
);
9579 gen_load_fpr64(ctx
, fp1
, ft
);
9580 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9581 tcg_temp_free_i64(fp1
);
9582 gen_store_fpr64(ctx
, fp0
, fd
);
9583 tcg_temp_free_i64(fp0
);
9596 case OPC_CMP_NGLE_D
:
9603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9604 if (ctx
->opcode
& (1 << 6)) {
9605 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9607 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9611 check_cp1_registers(ctx
, fs
);
9613 TCGv_i32 fp32
= tcg_temp_new_i32();
9614 TCGv_i64 fp64
= tcg_temp_new_i64();
9616 gen_load_fpr64(ctx
, fp64
, fs
);
9617 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9618 tcg_temp_free_i64(fp64
);
9619 gen_store_fpr32(ctx
, fp32
, fd
);
9620 tcg_temp_free_i32(fp32
);
9624 check_cp1_registers(ctx
, fs
);
9626 TCGv_i32 fp32
= tcg_temp_new_i32();
9627 TCGv_i64 fp64
= tcg_temp_new_i64();
9629 gen_load_fpr64(ctx
, fp64
, fs
);
9630 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9631 tcg_temp_free_i64(fp64
);
9632 gen_store_fpr32(ctx
, fp32
, fd
);
9633 tcg_temp_free_i32(fp32
);
9637 check_cp1_64bitmode(ctx
);
9639 TCGv_i64 fp0
= tcg_temp_new_i64();
9641 gen_load_fpr64(ctx
, fp0
, fs
);
9642 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9643 gen_store_fpr64(ctx
, fp0
, fd
);
9644 tcg_temp_free_i64(fp0
);
9649 TCGv_i32 fp0
= tcg_temp_new_i32();
9651 gen_load_fpr32(ctx
, fp0
, fs
);
9652 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9653 gen_store_fpr32(ctx
, fp0
, fd
);
9654 tcg_temp_free_i32(fp0
);
9658 check_cp1_registers(ctx
, fd
);
9660 TCGv_i32 fp32
= tcg_temp_new_i32();
9661 TCGv_i64 fp64
= tcg_temp_new_i64();
9663 gen_load_fpr32(ctx
, fp32
, fs
);
9664 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9665 tcg_temp_free_i32(fp32
);
9666 gen_store_fpr64(ctx
, fp64
, fd
);
9667 tcg_temp_free_i64(fp64
);
9671 check_cp1_64bitmode(ctx
);
9673 TCGv_i32 fp32
= tcg_temp_new_i32();
9674 TCGv_i64 fp64
= tcg_temp_new_i64();
9676 gen_load_fpr64(ctx
, fp64
, fs
);
9677 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9678 tcg_temp_free_i64(fp64
);
9679 gen_store_fpr32(ctx
, fp32
, fd
);
9680 tcg_temp_free_i32(fp32
);
9684 check_cp1_64bitmode(ctx
);
9686 TCGv_i64 fp0
= tcg_temp_new_i64();
9688 gen_load_fpr64(ctx
, fp0
, fs
);
9689 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9690 gen_store_fpr64(ctx
, fp0
, fd
);
9691 tcg_temp_free_i64(fp0
);
9697 TCGv_i64 fp0
= tcg_temp_new_i64();
9699 gen_load_fpr64(ctx
, fp0
, fs
);
9700 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9701 gen_store_fpr64(ctx
, fp0
, fd
);
9702 tcg_temp_free_i64(fp0
);
9708 TCGv_i64 fp0
= tcg_temp_new_i64();
9709 TCGv_i64 fp1
= tcg_temp_new_i64();
9711 gen_load_fpr64(ctx
, fp0
, fs
);
9712 gen_load_fpr64(ctx
, fp1
, ft
);
9713 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9714 tcg_temp_free_i64(fp1
);
9715 gen_store_fpr64(ctx
, fp0
, fd
);
9716 tcg_temp_free_i64(fp0
);
9722 TCGv_i64 fp0
= tcg_temp_new_i64();
9723 TCGv_i64 fp1
= tcg_temp_new_i64();
9725 gen_load_fpr64(ctx
, fp0
, fs
);
9726 gen_load_fpr64(ctx
, fp1
, ft
);
9727 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9728 tcg_temp_free_i64(fp1
);
9729 gen_store_fpr64(ctx
, fp0
, fd
);
9730 tcg_temp_free_i64(fp0
);
9736 TCGv_i64 fp0
= tcg_temp_new_i64();
9737 TCGv_i64 fp1
= tcg_temp_new_i64();
9739 gen_load_fpr64(ctx
, fp0
, fs
);
9740 gen_load_fpr64(ctx
, fp1
, ft
);
9741 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9742 tcg_temp_free_i64(fp1
);
9743 gen_store_fpr64(ctx
, fp0
, fd
);
9744 tcg_temp_free_i64(fp0
);
9750 TCGv_i64 fp0
= tcg_temp_new_i64();
9752 gen_load_fpr64(ctx
, fp0
, fs
);
9753 gen_helper_float_abs_ps(fp0
, fp0
);
9754 gen_store_fpr64(ctx
, fp0
, fd
);
9755 tcg_temp_free_i64(fp0
);
9761 TCGv_i64 fp0
= tcg_temp_new_i64();
9763 gen_load_fpr64(ctx
, fp0
, fs
);
9764 gen_store_fpr64(ctx
, fp0
, fd
);
9765 tcg_temp_free_i64(fp0
);
9771 TCGv_i64 fp0
= tcg_temp_new_i64();
9773 gen_load_fpr64(ctx
, fp0
, fs
);
9774 gen_helper_float_chs_ps(fp0
, fp0
);
9775 gen_store_fpr64(ctx
, fp0
, fd
);
9776 tcg_temp_free_i64(fp0
);
9781 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9786 TCGLabel
*l1
= gen_new_label();
9790 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9791 fp0
= tcg_temp_new_i64();
9792 gen_load_fpr64(ctx
, fp0
, fs
);
9793 gen_store_fpr64(ctx
, fp0
, fd
);
9794 tcg_temp_free_i64(fp0
);
9801 TCGLabel
*l1
= gen_new_label();
9805 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9806 fp0
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp0
, fs
);
9808 gen_store_fpr64(ctx
, fp0
, fd
);
9809 tcg_temp_free_i64(fp0
);
9817 TCGv_i64 fp0
= tcg_temp_new_i64();
9818 TCGv_i64 fp1
= tcg_temp_new_i64();
9820 gen_load_fpr64(ctx
, fp0
, ft
);
9821 gen_load_fpr64(ctx
, fp1
, fs
);
9822 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9823 tcg_temp_free_i64(fp1
);
9824 gen_store_fpr64(ctx
, fp0
, fd
);
9825 tcg_temp_free_i64(fp0
);
9831 TCGv_i64 fp0
= tcg_temp_new_i64();
9832 TCGv_i64 fp1
= tcg_temp_new_i64();
9834 gen_load_fpr64(ctx
, fp0
, ft
);
9835 gen_load_fpr64(ctx
, fp1
, fs
);
9836 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9837 tcg_temp_free_i64(fp1
);
9838 gen_store_fpr64(ctx
, fp0
, fd
);
9839 tcg_temp_free_i64(fp0
);
9845 TCGv_i64 fp0
= tcg_temp_new_i64();
9846 TCGv_i64 fp1
= tcg_temp_new_i64();
9848 gen_load_fpr64(ctx
, fp0
, fs
);
9849 gen_load_fpr64(ctx
, fp1
, ft
);
9850 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9851 tcg_temp_free_i64(fp1
);
9852 gen_store_fpr64(ctx
, fp0
, fd
);
9853 tcg_temp_free_i64(fp0
);
9859 TCGv_i64 fp0
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp0
, fs
);
9862 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9863 gen_store_fpr64(ctx
, fp0
, fd
);
9864 tcg_temp_free_i64(fp0
);
9870 TCGv_i64 fp0
= tcg_temp_new_i64();
9872 gen_load_fpr64(ctx
, fp0
, fs
);
9873 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9874 gen_store_fpr64(ctx
, fp0
, fd
);
9875 tcg_temp_free_i64(fp0
);
9881 TCGv_i64 fp0
= tcg_temp_new_i64();
9882 TCGv_i64 fp1
= tcg_temp_new_i64();
9884 gen_load_fpr64(ctx
, fp0
, fs
);
9885 gen_load_fpr64(ctx
, fp1
, ft
);
9886 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9887 tcg_temp_free_i64(fp1
);
9888 gen_store_fpr64(ctx
, fp0
, fd
);
9889 tcg_temp_free_i64(fp0
);
9893 check_cp1_64bitmode(ctx
);
9895 TCGv_i32 fp0
= tcg_temp_new_i32();
9897 gen_load_fpr32h(ctx
, fp0
, fs
);
9898 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9899 gen_store_fpr32(ctx
, fp0
, fd
);
9900 tcg_temp_free_i32(fp0
);
9906 TCGv_i64 fp0
= tcg_temp_new_i64();
9908 gen_load_fpr64(ctx
, fp0
, fs
);
9909 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9910 gen_store_fpr64(ctx
, fp0
, fd
);
9911 tcg_temp_free_i64(fp0
);
9915 check_cp1_64bitmode(ctx
);
9917 TCGv_i32 fp0
= tcg_temp_new_i32();
9919 gen_load_fpr32(ctx
, fp0
, fs
);
9920 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9921 gen_store_fpr32(ctx
, fp0
, fd
);
9922 tcg_temp_free_i32(fp0
);
9928 TCGv_i32 fp0
= tcg_temp_new_i32();
9929 TCGv_i32 fp1
= tcg_temp_new_i32();
9931 gen_load_fpr32(ctx
, fp0
, fs
);
9932 gen_load_fpr32(ctx
, fp1
, ft
);
9933 gen_store_fpr32h(ctx
, fp0
, fd
);
9934 gen_store_fpr32(ctx
, fp1
, fd
);
9935 tcg_temp_free_i32(fp0
);
9936 tcg_temp_free_i32(fp1
);
9942 TCGv_i32 fp0
= tcg_temp_new_i32();
9943 TCGv_i32 fp1
= tcg_temp_new_i32();
9945 gen_load_fpr32(ctx
, fp0
, fs
);
9946 gen_load_fpr32h(ctx
, fp1
, ft
);
9947 gen_store_fpr32(ctx
, fp1
, fd
);
9948 gen_store_fpr32h(ctx
, fp0
, fd
);
9949 tcg_temp_free_i32(fp0
);
9950 tcg_temp_free_i32(fp1
);
9956 TCGv_i32 fp0
= tcg_temp_new_i32();
9957 TCGv_i32 fp1
= tcg_temp_new_i32();
9959 gen_load_fpr32h(ctx
, fp0
, fs
);
9960 gen_load_fpr32(ctx
, fp1
, ft
);
9961 gen_store_fpr32(ctx
, fp1
, fd
);
9962 gen_store_fpr32h(ctx
, fp0
, fd
);
9963 tcg_temp_free_i32(fp0
);
9964 tcg_temp_free_i32(fp1
);
9970 TCGv_i32 fp0
= tcg_temp_new_i32();
9971 TCGv_i32 fp1
= tcg_temp_new_i32();
9973 gen_load_fpr32h(ctx
, fp0
, fs
);
9974 gen_load_fpr32h(ctx
, fp1
, ft
);
9975 gen_store_fpr32(ctx
, fp1
, fd
);
9976 gen_store_fpr32h(ctx
, fp0
, fd
);
9977 tcg_temp_free_i32(fp0
);
9978 tcg_temp_free_i32(fp1
);
9984 case OPC_CMP_UEQ_PS
:
9985 case OPC_CMP_OLT_PS
:
9986 case OPC_CMP_ULT_PS
:
9987 case OPC_CMP_OLE_PS
:
9988 case OPC_CMP_ULE_PS
:
9990 case OPC_CMP_NGLE_PS
:
9991 case OPC_CMP_SEQ_PS
:
9992 case OPC_CMP_NGL_PS
:
9994 case OPC_CMP_NGE_PS
:
9996 case OPC_CMP_NGT_PS
:
9997 if (ctx
->opcode
& (1 << 6)) {
9998 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10000 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10004 MIPS_INVAL("farith");
10005 generate_exception (ctx
, EXCP_RI
);
10010 /* Coprocessor 3 (FPU) */
10011 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10012 int fd
, int fs
, int base
, int index
)
10014 TCGv t0
= tcg_temp_new();
10017 gen_load_gpr(t0
, index
);
10018 } else if (index
== 0) {
10019 gen_load_gpr(t0
, base
);
10021 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10023 /* Don't do NOP if destination is zero: we must perform the actual
10029 TCGv_i32 fp0
= tcg_temp_new_i32();
10031 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10032 tcg_gen_trunc_tl_i32(fp0
, t0
);
10033 gen_store_fpr32(ctx
, fp0
, fd
);
10034 tcg_temp_free_i32(fp0
);
10039 check_cp1_registers(ctx
, fd
);
10041 TCGv_i64 fp0
= tcg_temp_new_i64();
10042 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10043 gen_store_fpr64(ctx
, fp0
, fd
);
10044 tcg_temp_free_i64(fp0
);
10048 check_cp1_64bitmode(ctx
);
10049 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10051 TCGv_i64 fp0
= tcg_temp_new_i64();
10053 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10054 gen_store_fpr64(ctx
, fp0
, fd
);
10055 tcg_temp_free_i64(fp0
);
10061 TCGv_i32 fp0
= tcg_temp_new_i32();
10062 gen_load_fpr32(ctx
, fp0
, fs
);
10063 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10064 tcg_temp_free_i32(fp0
);
10069 check_cp1_registers(ctx
, fs
);
10071 TCGv_i64 fp0
= tcg_temp_new_i64();
10072 gen_load_fpr64(ctx
, fp0
, fs
);
10073 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10074 tcg_temp_free_i64(fp0
);
10078 check_cp1_64bitmode(ctx
);
10079 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10081 TCGv_i64 fp0
= tcg_temp_new_i64();
10082 gen_load_fpr64(ctx
, fp0
, fs
);
10083 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10084 tcg_temp_free_i64(fp0
);
10091 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10092 int fd
, int fr
, int fs
, int ft
)
10098 TCGv t0
= tcg_temp_local_new();
10099 TCGv_i32 fp
= tcg_temp_new_i32();
10100 TCGv_i32 fph
= tcg_temp_new_i32();
10101 TCGLabel
*l1
= gen_new_label();
10102 TCGLabel
*l2
= gen_new_label();
10104 gen_load_gpr(t0
, fr
);
10105 tcg_gen_andi_tl(t0
, t0
, 0x7);
10107 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10108 gen_load_fpr32(ctx
, fp
, fs
);
10109 gen_load_fpr32h(ctx
, fph
, fs
);
10110 gen_store_fpr32(ctx
, fp
, fd
);
10111 gen_store_fpr32h(ctx
, fph
, fd
);
10114 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10116 #ifdef TARGET_WORDS_BIGENDIAN
10117 gen_load_fpr32(ctx
, fp
, fs
);
10118 gen_load_fpr32h(ctx
, fph
, ft
);
10119 gen_store_fpr32h(ctx
, fp
, fd
);
10120 gen_store_fpr32(ctx
, fph
, fd
);
10122 gen_load_fpr32h(ctx
, fph
, fs
);
10123 gen_load_fpr32(ctx
, fp
, ft
);
10124 gen_store_fpr32(ctx
, fph
, fd
);
10125 gen_store_fpr32h(ctx
, fp
, fd
);
10128 tcg_temp_free_i32(fp
);
10129 tcg_temp_free_i32(fph
);
10135 TCGv_i32 fp0
= tcg_temp_new_i32();
10136 TCGv_i32 fp1
= tcg_temp_new_i32();
10137 TCGv_i32 fp2
= tcg_temp_new_i32();
10139 gen_load_fpr32(ctx
, fp0
, fs
);
10140 gen_load_fpr32(ctx
, fp1
, ft
);
10141 gen_load_fpr32(ctx
, fp2
, fr
);
10142 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10143 tcg_temp_free_i32(fp0
);
10144 tcg_temp_free_i32(fp1
);
10145 gen_store_fpr32(ctx
, fp2
, fd
);
10146 tcg_temp_free_i32(fp2
);
10151 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10153 TCGv_i64 fp0
= tcg_temp_new_i64();
10154 TCGv_i64 fp1
= tcg_temp_new_i64();
10155 TCGv_i64 fp2
= tcg_temp_new_i64();
10157 gen_load_fpr64(ctx
, fp0
, fs
);
10158 gen_load_fpr64(ctx
, fp1
, ft
);
10159 gen_load_fpr64(ctx
, fp2
, fr
);
10160 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10161 tcg_temp_free_i64(fp0
);
10162 tcg_temp_free_i64(fp1
);
10163 gen_store_fpr64(ctx
, fp2
, fd
);
10164 tcg_temp_free_i64(fp2
);
10170 TCGv_i64 fp0
= tcg_temp_new_i64();
10171 TCGv_i64 fp1
= tcg_temp_new_i64();
10172 TCGv_i64 fp2
= tcg_temp_new_i64();
10174 gen_load_fpr64(ctx
, fp0
, fs
);
10175 gen_load_fpr64(ctx
, fp1
, ft
);
10176 gen_load_fpr64(ctx
, fp2
, fr
);
10177 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10178 tcg_temp_free_i64(fp0
);
10179 tcg_temp_free_i64(fp1
);
10180 gen_store_fpr64(ctx
, fp2
, fd
);
10181 tcg_temp_free_i64(fp2
);
10187 TCGv_i32 fp0
= tcg_temp_new_i32();
10188 TCGv_i32 fp1
= tcg_temp_new_i32();
10189 TCGv_i32 fp2
= tcg_temp_new_i32();
10191 gen_load_fpr32(ctx
, fp0
, fs
);
10192 gen_load_fpr32(ctx
, fp1
, ft
);
10193 gen_load_fpr32(ctx
, fp2
, fr
);
10194 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10195 tcg_temp_free_i32(fp0
);
10196 tcg_temp_free_i32(fp1
);
10197 gen_store_fpr32(ctx
, fp2
, fd
);
10198 tcg_temp_free_i32(fp2
);
10203 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10205 TCGv_i64 fp0
= tcg_temp_new_i64();
10206 TCGv_i64 fp1
= tcg_temp_new_i64();
10207 TCGv_i64 fp2
= tcg_temp_new_i64();
10209 gen_load_fpr64(ctx
, fp0
, fs
);
10210 gen_load_fpr64(ctx
, fp1
, ft
);
10211 gen_load_fpr64(ctx
, fp2
, fr
);
10212 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10213 tcg_temp_free_i64(fp0
);
10214 tcg_temp_free_i64(fp1
);
10215 gen_store_fpr64(ctx
, fp2
, fd
);
10216 tcg_temp_free_i64(fp2
);
10222 TCGv_i64 fp0
= tcg_temp_new_i64();
10223 TCGv_i64 fp1
= tcg_temp_new_i64();
10224 TCGv_i64 fp2
= tcg_temp_new_i64();
10226 gen_load_fpr64(ctx
, fp0
, fs
);
10227 gen_load_fpr64(ctx
, fp1
, ft
);
10228 gen_load_fpr64(ctx
, fp2
, fr
);
10229 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10230 tcg_temp_free_i64(fp0
);
10231 tcg_temp_free_i64(fp1
);
10232 gen_store_fpr64(ctx
, fp2
, fd
);
10233 tcg_temp_free_i64(fp2
);
10239 TCGv_i32 fp0
= tcg_temp_new_i32();
10240 TCGv_i32 fp1
= tcg_temp_new_i32();
10241 TCGv_i32 fp2
= tcg_temp_new_i32();
10243 gen_load_fpr32(ctx
, fp0
, fs
);
10244 gen_load_fpr32(ctx
, fp1
, ft
);
10245 gen_load_fpr32(ctx
, fp2
, fr
);
10246 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10247 tcg_temp_free_i32(fp0
);
10248 tcg_temp_free_i32(fp1
);
10249 gen_store_fpr32(ctx
, fp2
, fd
);
10250 tcg_temp_free_i32(fp2
);
10255 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10257 TCGv_i64 fp0
= tcg_temp_new_i64();
10258 TCGv_i64 fp1
= tcg_temp_new_i64();
10259 TCGv_i64 fp2
= tcg_temp_new_i64();
10261 gen_load_fpr64(ctx
, fp0
, fs
);
10262 gen_load_fpr64(ctx
, fp1
, ft
);
10263 gen_load_fpr64(ctx
, fp2
, fr
);
10264 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10265 tcg_temp_free_i64(fp0
);
10266 tcg_temp_free_i64(fp1
);
10267 gen_store_fpr64(ctx
, fp2
, fd
);
10268 tcg_temp_free_i64(fp2
);
10274 TCGv_i64 fp0
= tcg_temp_new_i64();
10275 TCGv_i64 fp1
= tcg_temp_new_i64();
10276 TCGv_i64 fp2
= tcg_temp_new_i64();
10278 gen_load_fpr64(ctx
, fp0
, fs
);
10279 gen_load_fpr64(ctx
, fp1
, ft
);
10280 gen_load_fpr64(ctx
, fp2
, fr
);
10281 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10282 tcg_temp_free_i64(fp0
);
10283 tcg_temp_free_i64(fp1
);
10284 gen_store_fpr64(ctx
, fp2
, fd
);
10285 tcg_temp_free_i64(fp2
);
10291 TCGv_i32 fp0
= tcg_temp_new_i32();
10292 TCGv_i32 fp1
= tcg_temp_new_i32();
10293 TCGv_i32 fp2
= tcg_temp_new_i32();
10295 gen_load_fpr32(ctx
, fp0
, fs
);
10296 gen_load_fpr32(ctx
, fp1
, ft
);
10297 gen_load_fpr32(ctx
, fp2
, fr
);
10298 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10299 tcg_temp_free_i32(fp0
);
10300 tcg_temp_free_i32(fp1
);
10301 gen_store_fpr32(ctx
, fp2
, fd
);
10302 tcg_temp_free_i32(fp2
);
10307 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10309 TCGv_i64 fp0
= tcg_temp_new_i64();
10310 TCGv_i64 fp1
= tcg_temp_new_i64();
10311 TCGv_i64 fp2
= tcg_temp_new_i64();
10313 gen_load_fpr64(ctx
, fp0
, fs
);
10314 gen_load_fpr64(ctx
, fp1
, ft
);
10315 gen_load_fpr64(ctx
, fp2
, fr
);
10316 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10317 tcg_temp_free_i64(fp0
);
10318 tcg_temp_free_i64(fp1
);
10319 gen_store_fpr64(ctx
, fp2
, fd
);
10320 tcg_temp_free_i64(fp2
);
10326 TCGv_i64 fp0
= tcg_temp_new_i64();
10327 TCGv_i64 fp1
= tcg_temp_new_i64();
10328 TCGv_i64 fp2
= tcg_temp_new_i64();
10330 gen_load_fpr64(ctx
, fp0
, fs
);
10331 gen_load_fpr64(ctx
, fp1
, ft
);
10332 gen_load_fpr64(ctx
, fp2
, fr
);
10333 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10334 tcg_temp_free_i64(fp0
);
10335 tcg_temp_free_i64(fp1
);
10336 gen_store_fpr64(ctx
, fp2
, fd
);
10337 tcg_temp_free_i64(fp2
);
10341 MIPS_INVAL("flt3_arith");
10342 generate_exception (ctx
, EXCP_RI
);
10347 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
)
10351 #if !defined(CONFIG_USER_ONLY)
10352 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10353 Therefore only check the ISA in system mode. */
10354 check_insn(ctx
, ISA_MIPS32R2
);
10356 t0
= tcg_temp_new();
10360 save_cpu_state(ctx
, 1);
10361 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10362 gen_store_gpr(t0
, rt
);
10365 save_cpu_state(ctx
, 1);
10366 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10367 gen_store_gpr(t0
, rt
);
10370 save_cpu_state(ctx
, 1);
10371 gen_helper_rdhwr_cc(t0
, cpu_env
);
10372 gen_store_gpr(t0
, rt
);
10375 save_cpu_state(ctx
, 1);
10376 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10377 gen_store_gpr(t0
, rt
);
10380 #if defined(CONFIG_USER_ONLY)
10381 tcg_gen_ld_tl(t0
, cpu_env
,
10382 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10383 gen_store_gpr(t0
, rt
);
10386 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10387 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10388 tcg_gen_ld_tl(t0
, cpu_env
,
10389 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10390 gen_store_gpr(t0
, rt
);
10392 generate_exception(ctx
, EXCP_RI
);
10396 default: /* Invalid */
10397 MIPS_INVAL("rdhwr");
10398 generate_exception(ctx
, EXCP_RI
);
10404 static inline void clear_branch_hflags(DisasContext
*ctx
)
10406 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10407 if (ctx
->bstate
== BS_NONE
) {
10408 save_cpu_state(ctx
, 0);
10410 /* it is not safe to save ctx->hflags as hflags may be changed
10411 in execution time by the instruction in delay / forbidden slot. */
10412 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10416 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10418 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10419 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10420 /* Branches completion */
10421 clear_branch_hflags(ctx
);
10422 ctx
->bstate
= BS_BRANCH
;
10423 /* FIXME: Need to clear can_do_io. */
10424 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10425 case MIPS_HFLAG_FBNSLOT
:
10426 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10429 /* unconditional branch */
10430 if (proc_hflags
& MIPS_HFLAG_BX
) {
10431 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10433 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10435 case MIPS_HFLAG_BL
:
10436 /* blikely taken case */
10437 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10439 case MIPS_HFLAG_BC
:
10440 /* Conditional branch */
10442 TCGLabel
*l1
= gen_new_label();
10444 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10445 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10447 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10450 case MIPS_HFLAG_BR
:
10451 /* unconditional branch to register */
10452 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10453 TCGv t0
= tcg_temp_new();
10454 TCGv_i32 t1
= tcg_temp_new_i32();
10456 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10457 tcg_gen_trunc_tl_i32(t1
, t0
);
10459 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10460 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10461 tcg_gen_or_i32(hflags
, hflags
, t1
);
10462 tcg_temp_free_i32(t1
);
10464 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10466 tcg_gen_mov_tl(cpu_PC
, btarget
);
10468 if (ctx
->singlestep_enabled
) {
10469 save_cpu_state(ctx
, 0);
10470 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
10472 tcg_gen_exit_tb(0);
10475 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10481 /* Compact Branches */
10482 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10483 int rs
, int rt
, int32_t offset
)
10485 int bcond_compute
= 0;
10486 TCGv t0
= tcg_temp_new();
10487 TCGv t1
= tcg_temp_new();
10488 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10490 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10491 #ifdef MIPS_DEBUG_DISAS
10492 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10495 generate_exception(ctx
, EXCP_RI
);
10499 /* Load needed operands and calculate btarget */
10501 /* compact branch */
10502 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10503 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10504 gen_load_gpr(t0
, rs
);
10505 gen_load_gpr(t1
, rt
);
10507 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10508 if (rs
<= rt
&& rs
== 0) {
10509 /* OPC_BEQZALC, OPC_BNEZALC */
10510 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10513 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10514 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10515 gen_load_gpr(t0
, rs
);
10516 gen_load_gpr(t1
, rt
);
10518 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10520 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10521 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10522 if (rs
== 0 || rs
== rt
) {
10523 /* OPC_BLEZALC, OPC_BGEZALC */
10524 /* OPC_BGTZALC, OPC_BLTZALC */
10525 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10527 gen_load_gpr(t0
, rs
);
10528 gen_load_gpr(t1
, rt
);
10530 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10534 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10539 /* OPC_BEQZC, OPC_BNEZC */
10540 gen_load_gpr(t0
, rs
);
10542 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10544 /* OPC_JIC, OPC_JIALC */
10545 TCGv tbase
= tcg_temp_new();
10546 TCGv toffset
= tcg_temp_new();
10548 gen_load_gpr(tbase
, rt
);
10549 tcg_gen_movi_tl(toffset
, offset
);
10550 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10551 tcg_temp_free(tbase
);
10552 tcg_temp_free(toffset
);
10556 MIPS_INVAL("Compact branch/jump");
10557 generate_exception(ctx
, EXCP_RI
);
10561 if (bcond_compute
== 0) {
10562 /* Uncoditional compact branch */
10565 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10568 ctx
->hflags
|= MIPS_HFLAG_BR
;
10571 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10574 ctx
->hflags
|= MIPS_HFLAG_B
;
10577 MIPS_INVAL("Compact branch/jump");
10578 generate_exception(ctx
, EXCP_RI
);
10582 /* Generating branch here as compact branches don't have delay slot */
10583 gen_branch(ctx
, 4);
10585 /* Conditional compact branch */
10586 TCGLabel
*fs
= gen_new_label();
10587 save_cpu_state(ctx
, 0);
10590 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10591 if (rs
== 0 && rt
!= 0) {
10593 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10594 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10596 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10599 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10602 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10603 if (rs
== 0 && rt
!= 0) {
10605 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10606 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10608 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10611 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10614 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10615 if (rs
== 0 && rt
!= 0) {
10617 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10618 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10620 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10623 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10626 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10627 if (rs
== 0 && rt
!= 0) {
10629 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10630 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10632 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10635 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10638 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10639 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10641 /* OPC_BOVC, OPC_BNVC */
10642 TCGv t2
= tcg_temp_new();
10643 TCGv t3
= tcg_temp_new();
10644 TCGv t4
= tcg_temp_new();
10645 TCGv input_overflow
= tcg_temp_new();
10647 gen_load_gpr(t0
, rs
);
10648 gen_load_gpr(t1
, rt
);
10649 tcg_gen_ext32s_tl(t2
, t0
);
10650 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10651 tcg_gen_ext32s_tl(t3
, t1
);
10652 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10653 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10655 tcg_gen_add_tl(t4
, t2
, t3
);
10656 tcg_gen_ext32s_tl(t4
, t4
);
10657 tcg_gen_xor_tl(t2
, t2
, t3
);
10658 tcg_gen_xor_tl(t3
, t4
, t3
);
10659 tcg_gen_andc_tl(t2
, t3
, t2
);
10660 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10661 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10662 if (opc
== OPC_BOVC
) {
10664 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10667 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10669 tcg_temp_free(input_overflow
);
10673 } else if (rs
< rt
&& rs
== 0) {
10674 /* OPC_BEQZALC, OPC_BNEZALC */
10675 if (opc
== OPC_BEQZALC
) {
10677 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10680 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10683 /* OPC_BEQC, OPC_BNEC */
10684 if (opc
== OPC_BEQC
) {
10686 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10689 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10694 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10697 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10700 MIPS_INVAL("Compact conditional branch/jump");
10701 generate_exception(ctx
, EXCP_RI
);
10705 /* Generating branch here as compact branches don't have delay slot */
10706 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10709 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10717 /* ISA extensions (ASEs) */
10718 /* MIPS16 extension to MIPS32 */
10720 /* MIPS16 major opcodes */
10722 M16_OPC_ADDIUSP
= 0x00,
10723 M16_OPC_ADDIUPC
= 0x01,
10725 M16_OPC_JAL
= 0x03,
10726 M16_OPC_BEQZ
= 0x04,
10727 M16_OPC_BNEQZ
= 0x05,
10728 M16_OPC_SHIFT
= 0x06,
10730 M16_OPC_RRIA
= 0x08,
10731 M16_OPC_ADDIU8
= 0x09,
10732 M16_OPC_SLTI
= 0x0a,
10733 M16_OPC_SLTIU
= 0x0b,
10736 M16_OPC_CMPI
= 0x0e,
10740 M16_OPC_LWSP
= 0x12,
10742 M16_OPC_LBU
= 0x14,
10743 M16_OPC_LHU
= 0x15,
10744 M16_OPC_LWPC
= 0x16,
10745 M16_OPC_LWU
= 0x17,
10748 M16_OPC_SWSP
= 0x1a,
10750 M16_OPC_RRR
= 0x1c,
10752 M16_OPC_EXTEND
= 0x1e,
10756 /* I8 funct field */
10775 /* RR funct field */
10809 /* I64 funct field */
10817 I64_DADDIUPC
= 0x6,
10821 /* RR ry field for CNVT */
10823 RR_RY_CNVT_ZEB
= 0x0,
10824 RR_RY_CNVT_ZEH
= 0x1,
10825 RR_RY_CNVT_ZEW
= 0x2,
10826 RR_RY_CNVT_SEB
= 0x4,
10827 RR_RY_CNVT_SEH
= 0x5,
10828 RR_RY_CNVT_SEW
= 0x6,
/* Map a 3-bit MIPS16 register encoding (0..7) to the corresponding
 * MIPS32 GPR number: encodings 0 and 1 select $16/$17 (s0/s1), while
 * 2..7 select $2..$7 directly.
 */
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
10838 static void gen_mips16_save (DisasContext
*ctx
,
10839 int xsregs
, int aregs
,
10840 int do_ra
, int do_s0
, int do_s1
,
10843 TCGv t0
= tcg_temp_new();
10844 TCGv t1
= tcg_temp_new();
10845 TCGv t2
= tcg_temp_new();
10875 generate_exception(ctx
, EXCP_RI
);
10881 gen_base_offset_addr(ctx
, t0
, 29, 12);
10882 gen_load_gpr(t1
, 7);
10883 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10886 gen_base_offset_addr(ctx
, t0
, 29, 8);
10887 gen_load_gpr(t1
, 6);
10888 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10891 gen_base_offset_addr(ctx
, t0
, 29, 4);
10892 gen_load_gpr(t1
, 5);
10893 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10896 gen_base_offset_addr(ctx
, t0
, 29, 0);
10897 gen_load_gpr(t1
, 4);
10898 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10901 gen_load_gpr(t0
, 29);
10903 #define DECR_AND_STORE(reg) do { \
10904 tcg_gen_movi_tl(t2, -4); \
10905 gen_op_addr_add(ctx, t0, t0, t2); \
10906 gen_load_gpr(t1, reg); \
10907 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10911 DECR_AND_STORE(31);
10916 DECR_AND_STORE(30);
10919 DECR_AND_STORE(23);
10922 DECR_AND_STORE(22);
10925 DECR_AND_STORE(21);
10928 DECR_AND_STORE(20);
10931 DECR_AND_STORE(19);
10934 DECR_AND_STORE(18);
10938 DECR_AND_STORE(17);
10941 DECR_AND_STORE(16);
10971 generate_exception(ctx
, EXCP_RI
);
10987 #undef DECR_AND_STORE
10989 tcg_gen_movi_tl(t2
, -framesize
);
10990 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
10996 static void gen_mips16_restore (DisasContext
*ctx
,
10997 int xsregs
, int aregs
,
10998 int do_ra
, int do_s0
, int do_s1
,
11002 TCGv t0
= tcg_temp_new();
11003 TCGv t1
= tcg_temp_new();
11004 TCGv t2
= tcg_temp_new();
11006 tcg_gen_movi_tl(t2
, framesize
);
11007 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11009 #define DECR_AND_LOAD(reg) do { \
11010 tcg_gen_movi_tl(t2, -4); \
11011 gen_op_addr_add(ctx, t0, t0, t2); \
11012 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11013 gen_store_gpr(t1, reg); \
11077 generate_exception(ctx
, EXCP_RI
);
11093 #undef DECR_AND_LOAD
11095 tcg_gen_movi_tl(t2
, framesize
);
11096 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11102 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11103 int is_64_bit
, int extended
)
11107 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11108 generate_exception(ctx
, EXCP_RI
);
11112 t0
= tcg_temp_new();
11114 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11115 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11117 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11123 #if defined(TARGET_MIPS64)
11124 static void decode_i64_mips16 (DisasContext
*ctx
,
11125 int ry
, int funct
, int16_t offset
,
11130 check_insn(ctx
, ISA_MIPS3
);
11131 check_mips_64(ctx
);
11132 offset
= extended
? offset
: offset
<< 3;
11133 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11136 check_insn(ctx
, ISA_MIPS3
);
11137 check_mips_64(ctx
);
11138 offset
= extended
? offset
: offset
<< 3;
11139 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11142 check_insn(ctx
, ISA_MIPS3
);
11143 check_mips_64(ctx
);
11144 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11145 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11148 check_insn(ctx
, ISA_MIPS3
);
11149 check_mips_64(ctx
);
11150 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11151 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11154 check_insn(ctx
, ISA_MIPS3
);
11155 check_mips_64(ctx
);
11156 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11157 generate_exception(ctx
, EXCP_RI
);
11159 offset
= extended
? offset
: offset
<< 3;
11160 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11164 check_insn(ctx
, ISA_MIPS3
);
11165 check_mips_64(ctx
);
11166 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11167 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11170 check_insn(ctx
, ISA_MIPS3
);
11171 check_mips_64(ctx
);
11172 offset
= extended
? offset
: offset
<< 2;
11173 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11176 check_insn(ctx
, ISA_MIPS3
);
11177 check_mips_64(ctx
);
11178 offset
= extended
? offset
: offset
<< 2;
11179 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11185 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11187 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11188 int op
, rx
, ry
, funct
, sa
;
11189 int16_t imm
, offset
;
11191 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11192 op
= (ctx
->opcode
>> 11) & 0x1f;
11193 sa
= (ctx
->opcode
>> 22) & 0x1f;
11194 funct
= (ctx
->opcode
>> 8) & 0x7;
11195 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11196 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11197 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11198 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11199 | (ctx
->opcode
& 0x1f));
11201 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11204 case M16_OPC_ADDIUSP
:
11205 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11207 case M16_OPC_ADDIUPC
:
11208 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11211 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11212 /* No delay slot, so just process as a normal instruction */
11215 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11216 /* No delay slot, so just process as a normal instruction */
11218 case M16_OPC_BNEQZ
:
11219 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11220 /* No delay slot, so just process as a normal instruction */
11222 case M16_OPC_SHIFT
:
11223 switch (ctx
->opcode
& 0x3) {
11225 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11228 #if defined(TARGET_MIPS64)
11229 check_mips_64(ctx
);
11230 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11232 generate_exception(ctx
, EXCP_RI
);
11236 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11239 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11243 #if defined(TARGET_MIPS64)
11245 check_insn(ctx
, ISA_MIPS3
);
11246 check_mips_64(ctx
);
11247 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11251 imm
= ctx
->opcode
& 0xf;
11252 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11253 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11254 imm
= (int16_t) (imm
<< 1) >> 1;
11255 if ((ctx
->opcode
>> 4) & 0x1) {
11256 #if defined(TARGET_MIPS64)
11257 check_mips_64(ctx
);
11258 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11260 generate_exception(ctx
, EXCP_RI
);
11263 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11266 case M16_OPC_ADDIU8
:
11267 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11270 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11272 case M16_OPC_SLTIU
:
11273 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11278 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11281 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11284 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11287 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11290 check_insn(ctx
, ISA_MIPS32
);
11292 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11293 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11294 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11295 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11296 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11297 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11298 | (ctx
->opcode
& 0xf)) << 3;
11300 if (ctx
->opcode
& (1 << 7)) {
11301 gen_mips16_save(ctx
, xsregs
, aregs
,
11302 do_ra
, do_s0
, do_s1
,
11305 gen_mips16_restore(ctx
, xsregs
, aregs
,
11306 do_ra
, do_s0
, do_s1
,
11312 generate_exception(ctx
, EXCP_RI
);
11317 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11320 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11322 #if defined(TARGET_MIPS64)
11324 check_insn(ctx
, ISA_MIPS3
);
11325 check_mips_64(ctx
);
11326 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11330 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11333 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11336 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11339 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11342 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11345 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11348 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11350 #if defined(TARGET_MIPS64)
11352 check_insn(ctx
, ISA_MIPS3
);
11353 check_mips_64(ctx
);
11354 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11358 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11361 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11364 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11367 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11369 #if defined(TARGET_MIPS64)
11371 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11375 generate_exception(ctx
, EXCP_RI
);
11382 static inline bool is_uhi(int sdbbp_code
)
11384 #ifdef CONFIG_USER_ONLY
11387 return semihosting_enabled() && sdbbp_code
== 1;
11391 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11395 int op
, cnvt_op
, op1
, offset
;
11399 op
= (ctx
->opcode
>> 11) & 0x1f;
11400 sa
= (ctx
->opcode
>> 2) & 0x7;
11401 sa
= sa
== 0 ? 8 : sa
;
11402 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11403 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11404 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11405 op1
= offset
= ctx
->opcode
& 0x1f;
11410 case M16_OPC_ADDIUSP
:
11412 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11414 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11417 case M16_OPC_ADDIUPC
:
11418 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11421 offset
= (ctx
->opcode
& 0x7ff) << 1;
11422 offset
= (int16_t)(offset
<< 4) >> 4;
11423 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11424 /* No delay slot, so just process as a normal instruction */
11427 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11428 offset
= (((ctx
->opcode
& 0x1f) << 21)
11429 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11431 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11432 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11436 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11437 ((int8_t)ctx
->opcode
) << 1, 0);
11438 /* No delay slot, so just process as a normal instruction */
11440 case M16_OPC_BNEQZ
:
11441 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11442 ((int8_t)ctx
->opcode
) << 1, 0);
11443 /* No delay slot, so just process as a normal instruction */
11445 case M16_OPC_SHIFT
:
11446 switch (ctx
->opcode
& 0x3) {
11448 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11451 #if defined(TARGET_MIPS64)
11452 check_insn(ctx
, ISA_MIPS3
);
11453 check_mips_64(ctx
);
11454 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11456 generate_exception(ctx
, EXCP_RI
);
11460 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11463 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11467 #if defined(TARGET_MIPS64)
11469 check_insn(ctx
, ISA_MIPS3
);
11470 check_mips_64(ctx
);
11471 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11476 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11478 if ((ctx
->opcode
>> 4) & 1) {
11479 #if defined(TARGET_MIPS64)
11480 check_insn(ctx
, ISA_MIPS3
);
11481 check_mips_64(ctx
);
11482 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11484 generate_exception(ctx
, EXCP_RI
);
11487 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11491 case M16_OPC_ADDIU8
:
11493 int16_t imm
= (int8_t) ctx
->opcode
;
11495 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11500 int16_t imm
= (uint8_t) ctx
->opcode
;
11501 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11504 case M16_OPC_SLTIU
:
11506 int16_t imm
= (uint8_t) ctx
->opcode
;
11507 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11514 funct
= (ctx
->opcode
>> 8) & 0x7;
11517 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11518 ((int8_t)ctx
->opcode
) << 1, 0);
11521 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11522 ((int8_t)ctx
->opcode
) << 1, 0);
11525 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11528 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11529 ((int8_t)ctx
->opcode
) << 3);
11532 check_insn(ctx
, ISA_MIPS32
);
11534 int do_ra
= ctx
->opcode
& (1 << 6);
11535 int do_s0
= ctx
->opcode
& (1 << 5);
11536 int do_s1
= ctx
->opcode
& (1 << 4);
11537 int framesize
= ctx
->opcode
& 0xf;
11539 if (framesize
== 0) {
11542 framesize
= framesize
<< 3;
11545 if (ctx
->opcode
& (1 << 7)) {
11546 gen_mips16_save(ctx
, 0, 0,
11547 do_ra
, do_s0
, do_s1
, framesize
);
11549 gen_mips16_restore(ctx
, 0, 0,
11550 do_ra
, do_s0
, do_s1
, framesize
);
11556 int rz
= xlat(ctx
->opcode
& 0x7);
11558 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11559 ((ctx
->opcode
>> 5) & 0x7);
11560 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11564 reg32
= ctx
->opcode
& 0x1f;
11565 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11568 generate_exception(ctx
, EXCP_RI
);
11575 int16_t imm
= (uint8_t) ctx
->opcode
;
11577 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11582 int16_t imm
= (uint8_t) ctx
->opcode
;
11583 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11586 #if defined(TARGET_MIPS64)
11588 check_insn(ctx
, ISA_MIPS3
);
11589 check_mips_64(ctx
);
11590 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11594 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11597 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11600 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11603 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11606 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11609 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11612 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11614 #if defined (TARGET_MIPS64)
11616 check_insn(ctx
, ISA_MIPS3
);
11617 check_mips_64(ctx
);
11618 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11622 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11625 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11628 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11631 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11635 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11638 switch (ctx
->opcode
& 0x3) {
11640 mips32_op
= OPC_ADDU
;
11643 mips32_op
= OPC_SUBU
;
11645 #if defined(TARGET_MIPS64)
11647 mips32_op
= OPC_DADDU
;
11648 check_insn(ctx
, ISA_MIPS3
);
11649 check_mips_64(ctx
);
11652 mips32_op
= OPC_DSUBU
;
11653 check_insn(ctx
, ISA_MIPS3
);
11654 check_mips_64(ctx
);
11658 generate_exception(ctx
, EXCP_RI
);
11662 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11671 int nd
= (ctx
->opcode
>> 7) & 0x1;
11672 int link
= (ctx
->opcode
>> 6) & 0x1;
11673 int ra
= (ctx
->opcode
>> 5) & 0x1;
11676 check_insn(ctx
, ISA_MIPS32
);
11685 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11690 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11691 gen_helper_do_semihosting(cpu_env
);
11693 /* XXX: not clear which exception should be raised
11694 * when in debug mode...
11696 check_insn(ctx
, ISA_MIPS32
);
11697 generate_exception(ctx
, EXCP_DBp
);
11701 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11704 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11707 generate_exception(ctx
, EXCP_BREAK
);
11710 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11713 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11716 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11718 #if defined (TARGET_MIPS64)
11720 check_insn(ctx
, ISA_MIPS3
);
11721 check_mips_64(ctx
);
11722 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11726 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11729 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11732 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11735 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11738 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11741 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11744 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11747 check_insn(ctx
, ISA_MIPS32
);
11749 case RR_RY_CNVT_ZEB
:
11750 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11752 case RR_RY_CNVT_ZEH
:
11753 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11755 case RR_RY_CNVT_SEB
:
11756 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11758 case RR_RY_CNVT_SEH
:
11759 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11761 #if defined (TARGET_MIPS64)
11762 case RR_RY_CNVT_ZEW
:
11763 check_insn(ctx
, ISA_MIPS64
);
11764 check_mips_64(ctx
);
11765 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11767 case RR_RY_CNVT_SEW
:
11768 check_insn(ctx
, ISA_MIPS64
);
11769 check_mips_64(ctx
);
11770 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11774 generate_exception(ctx
, EXCP_RI
);
11779 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11781 #if defined (TARGET_MIPS64)
11783 check_insn(ctx
, ISA_MIPS3
);
11784 check_mips_64(ctx
);
11785 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11788 check_insn(ctx
, ISA_MIPS3
);
11789 check_mips_64(ctx
);
11790 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11793 check_insn(ctx
, ISA_MIPS3
);
11794 check_mips_64(ctx
);
11795 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11798 check_insn(ctx
, ISA_MIPS3
);
11799 check_mips_64(ctx
);
11800 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11804 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11807 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11810 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11813 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11815 #if defined (TARGET_MIPS64)
11817 check_insn(ctx
, ISA_MIPS3
);
11818 check_mips_64(ctx
);
11819 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11822 check_insn(ctx
, ISA_MIPS3
);
11823 check_mips_64(ctx
);
11824 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11827 check_insn(ctx
, ISA_MIPS3
);
11828 check_mips_64(ctx
);
11829 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11832 check_insn(ctx
, ISA_MIPS3
);
11833 check_mips_64(ctx
);
11834 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11838 generate_exception(ctx
, EXCP_RI
);
11842 case M16_OPC_EXTEND
:
11843 decode_extended_mips16_opc(env
, ctx
);
11846 #if defined(TARGET_MIPS64)
11848 funct
= (ctx
->opcode
>> 8) & 0x7;
11849 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11853 generate_exception(ctx
, EXCP_RI
);
11860 /* microMIPS extension to MIPS32/MIPS64 */
11863 * microMIPS32/microMIPS64 major opcodes
11865 * 1. MIPS Architecture for Programmers Volume II-B:
11866 * The microMIPS32 Instruction Set (Revision 3.05)
11868 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11870 * 2. MIPS Architecture For Programmers Volume II-A:
11871 * The MIPS64 Instruction Set (Revision 3.51)
11901 POOL32S
= 0x16, /* MIPS64 */
11902 DADDIU32
= 0x17, /* MIPS64 */
11931 /* 0x29 is reserved */
11944 /* 0x31 is reserved */
11957 SD32
= 0x36, /* MIPS64 */
11958 LD32
= 0x37, /* MIPS64 */
11960 /* 0x39 is reserved */
11976 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
11986 /* POOL32A encoding of minor opcode field */
11989 /* These opcodes are distinguished only by bits 9..6; those bits are
11990 * what are recorded below. */
12026 /* The following can be distinguished by their lower 6 bits. */
12034 /* POOL32AXF encoding of minor opcode field extension */
12037 * 1. MIPS Architecture for Programmers Volume II-B:
12038 * The microMIPS32 Instruction Set (Revision 3.05)
12040 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12042 * 2. MIPS Architecture for Programmers VolumeIV-e:
12043 * The MIPS DSP Application-Specific Extension
12044 * to the microMIPS32 Architecture (Revision 2.34)
12046 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12061 /* begin of microMIPS32 DSP */
12063 /* bits 13..12 for 0x01 */
12069 /* bits 13..12 for 0x2a */
12075 /* bits 13..12 for 0x32 */
12079 /* end of microMIPS32 DSP */
12081 /* bits 15..12 for 0x2c */
12098 /* bits 15..12 for 0x34 */
12106 /* bits 15..12 for 0x3c */
12108 JR
= 0x0, /* alias */
12116 /* bits 15..12 for 0x05 */
12120 /* bits 15..12 for 0x0d */
12132 /* bits 15..12 for 0x15 */
12138 /* bits 15..12 for 0x1d */
12142 /* bits 15..12 for 0x2d */
12147 /* bits 15..12 for 0x35 */
12154 /* POOL32B encoding of minor opcode field (bits 15..12) */
12170 /* POOL32C encoding of minor opcode field (bits 15..12) */
12178 /* 0xa is reserved */
12185 /* 0x6 is reserved */
12191 /* POOL32F encoding of minor opcode field (bits 5..0) */
12194 /* These are the bit 7..6 values */
12203 /* These are the bit 8..6 values */
12228 MOVZ_FMT_05
= 0x05,
12262 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12269 /* POOL32Fxf encoding of minor opcode extension field */
12307 /* POOL32I encoding of minor opcode field (bits 25..21) */
12337 /* These overlap and are distinguished by bit16 of the instruction */
12346 /* POOL16A encoding of minor opcode field */
12353 /* POOL16B encoding of minor opcode field */
12360 /* POOL16C encoding of minor opcode field */
12380 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12400 /* POOL16D encoding of minor opcode field */
12407 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit microMIPS/MIPS16 register encoding to the full GPR number.
 * Encodings 0..1 select $16/$17 (s0/s1); 2..7 select $2..$7 (v0-v1, a0-a3).
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12421 /* Used for 16-bit store instructions. */
/* As mmreg(), but encoding 0 selects $0 (zero) instead of $16.
 * Used for 16-bit store instructions.
 */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12429 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12430 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12431 #define uMIPS_RS2(op) uMIPS_RS(op)
12432 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12433 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12434 #define uMIPS_RS5(op) (op & 0x1f)
12436 /* Signed immediate */
12437 #define SIMM(op, start, width) \
12438 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12441 /* Zero-extended immediate */
12442 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12444 static void gen_addiur1sp(DisasContext
*ctx
)
12446 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12448 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12451 static void gen_addiur2(DisasContext
*ctx
)
12453 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12454 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12455 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12457 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12460 static void gen_addiusp(DisasContext
*ctx
)
12462 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12465 if (encoded
<= 1) {
12466 decoded
= 256 + encoded
;
12467 } else if (encoded
<= 255) {
12469 } else if (encoded
<= 509) {
12470 decoded
= encoded
- 512;
12472 decoded
= encoded
- 768;
12475 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12478 static void gen_addius5(DisasContext
*ctx
)
12480 int imm
= SIMM(ctx
->opcode
, 1, 4);
12481 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12483 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12486 static void gen_andi16(DisasContext
*ctx
)
12488 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12489 31, 32, 63, 64, 255, 32768, 65535 };
12490 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12491 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12492 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12494 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12497 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12498 int base
, int16_t offset
)
12503 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12504 generate_exception(ctx
, EXCP_RI
);
12508 t0
= tcg_temp_new();
12510 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12512 t1
= tcg_const_tl(reglist
);
12513 t2
= tcg_const_i32(ctx
->mem_idx
);
12515 save_cpu_state(ctx
, 1);
12518 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12521 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12523 #ifdef TARGET_MIPS64
12525 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12528 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12534 tcg_temp_free_i32(t2
);
12538 static void gen_pool16c_insn(DisasContext
*ctx
)
12540 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12541 int rs
= mmreg(ctx
->opcode
& 0x7);
12543 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12548 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12554 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12560 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12566 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12573 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12574 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12576 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12585 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12586 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12588 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12595 int reg
= ctx
->opcode
& 0x1f;
12597 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12603 int reg
= ctx
->opcode
& 0x1f;
12604 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12605 /* Let normal delay slot handling in our caller take us
12606 to the branch target. */
12611 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12612 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12616 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12617 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12621 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12625 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12628 generate_exception(ctx
, EXCP_BREAK
);
12631 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12632 gen_helper_do_semihosting(cpu_env
);
12634 /* XXX: not clear which exception should be raised
12635 * when in debug mode...
12637 check_insn(ctx
, ISA_MIPS32
);
12638 generate_exception(ctx
, EXCP_DBp
);
12641 case JRADDIUSP
+ 0:
12642 case JRADDIUSP
+ 1:
12644 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12645 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12646 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12647 /* Let normal delay slot handling in our caller take us
12648 to the branch target. */
12652 generate_exception(ctx
, EXCP_RI
);
12657 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12660 int rd
, rs
, re
, rt
;
12661 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12662 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12663 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12664 rd
= rd_enc
[enc_dest
];
12665 re
= re_enc
[enc_dest
];
12666 rs
= rs_rt_enc
[enc_rs
];
12667 rt
= rs_rt_enc
[enc_rt
];
12669 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12671 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12674 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12676 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12680 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12682 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12683 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12685 switch (ctx
->opcode
& 0xf) {
12687 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12690 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12694 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12695 int offset
= extract32(ctx
->opcode
, 4, 4);
12696 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12699 case R6_JRC16
: /* JRCADDIUSP */
12700 if ((ctx
->opcode
>> 4) & 1) {
12702 int imm
= extract32(ctx
->opcode
, 5, 5);
12703 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12704 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12707 int rs
= extract32(ctx
->opcode
, 5, 5);
12708 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12711 case MOVEP
... MOVEP_07
:
12712 case MOVEP_0C
... MOVEP_0F
:
12714 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12715 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12716 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12717 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12721 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12724 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12728 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12729 int offset
= extract32(ctx
->opcode
, 4, 4);
12730 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12733 case JALRC16
: /* BREAK16, SDBBP16 */
12734 switch (ctx
->opcode
& 0x3f) {
12736 case JALRC16
+ 0x20:
12738 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12743 generate_exception(ctx
, EXCP_BREAK
);
12747 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12748 gen_helper_do_semihosting(cpu_env
);
12750 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12751 generate_exception(ctx
, EXCP_RI
);
12753 generate_exception(ctx
, EXCP_DBp
);
12760 generate_exception(ctx
, EXCP_RI
);
12765 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
12767 TCGv t0
= tcg_temp_new();
12768 TCGv t1
= tcg_temp_new();
12770 gen_load_gpr(t0
, base
);
12773 gen_load_gpr(t1
, index
);
12774 tcg_gen_shli_tl(t1
, t1
, 2);
12775 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12778 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12779 gen_store_gpr(t1
, rd
);
12785 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12786 int base
, int16_t offset
)
12790 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12791 generate_exception(ctx
, EXCP_RI
);
12795 t0
= tcg_temp_new();
12796 t1
= tcg_temp_new();
12798 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12803 generate_exception(ctx
, EXCP_RI
);
12806 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12807 gen_store_gpr(t1
, rd
);
12808 tcg_gen_movi_tl(t1
, 4);
12809 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12810 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12811 gen_store_gpr(t1
, rd
+1);
12814 gen_load_gpr(t1
, rd
);
12815 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12816 tcg_gen_movi_tl(t1
, 4);
12817 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12818 gen_load_gpr(t1
, rd
+1);
12819 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12821 #ifdef TARGET_MIPS64
12824 generate_exception(ctx
, EXCP_RI
);
12827 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12828 gen_store_gpr(t1
, rd
);
12829 tcg_gen_movi_tl(t1
, 8);
12830 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12831 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12832 gen_store_gpr(t1
, rd
+1);
12835 gen_load_gpr(t1
, rd
);
12836 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12837 tcg_gen_movi_tl(t1
, 8);
12838 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12839 gen_load_gpr(t1
, rd
+1);
12840 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12848 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12850 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12851 int minor
= (ctx
->opcode
>> 12) & 0xf;
12852 uint32_t mips32_op
;
12854 switch (extension
) {
12856 mips32_op
= OPC_TEQ
;
12859 mips32_op
= OPC_TGE
;
12862 mips32_op
= OPC_TGEU
;
12865 mips32_op
= OPC_TLT
;
12868 mips32_op
= OPC_TLTU
;
12871 mips32_op
= OPC_TNE
;
12873 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12875 #ifndef CONFIG_USER_ONLY
12878 check_cp0_enabled(ctx
);
12880 /* Treat as NOP. */
12883 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12887 check_cp0_enabled(ctx
);
12889 TCGv t0
= tcg_temp_new();
12891 gen_load_gpr(t0
, rt
);
12892 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12898 switch (minor
& 3) {
12900 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12903 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12906 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12909 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12912 goto pool32axf_invalid
;
12916 switch (minor
& 3) {
12918 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12921 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12924 goto pool32axf_invalid
;
12930 check_insn(ctx
, ISA_MIPS32R6
);
12931 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12934 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12937 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12940 mips32_op
= OPC_CLO
;
12943 mips32_op
= OPC_CLZ
;
12945 check_insn(ctx
, ISA_MIPS32
);
12946 gen_cl(ctx
, mips32_op
, rt
, rs
);
12949 gen_rdhwr(ctx
, rt
, rs
);
12952 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
12955 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12956 mips32_op
= OPC_MULT
;
12959 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12960 mips32_op
= OPC_MULTU
;
12963 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12964 mips32_op
= OPC_DIV
;
12967 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12968 mips32_op
= OPC_DIVU
;
12971 check_insn(ctx
, ISA_MIPS32
);
12972 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12976 mips32_op
= OPC_MADD
;
12979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12980 mips32_op
= OPC_MADDU
;
12983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12984 mips32_op
= OPC_MSUB
;
12987 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12988 mips32_op
= OPC_MSUBU
;
12990 check_insn(ctx
, ISA_MIPS32
);
12991 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12994 goto pool32axf_invalid
;
13005 generate_exception_err(ctx
, EXCP_CpU
, 2);
13008 goto pool32axf_invalid
;
13013 case JALR
: /* JALRC */
13014 case JALR_HB
: /* JALRC_HB */
13015 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13016 /* JALRC, JALRC_HB */
13017 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13019 /* JALR, JALR_HB */
13020 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13021 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13027 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13028 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13031 goto pool32axf_invalid
;
13037 check_cp0_enabled(ctx
);
13038 check_insn(ctx
, ISA_MIPS32R2
);
13039 gen_load_srsgpr(rs
, rt
);
13042 check_cp0_enabled(ctx
);
13043 check_insn(ctx
, ISA_MIPS32R2
);
13044 gen_store_srsgpr(rs
, rt
);
13047 goto pool32axf_invalid
;
13050 #ifndef CONFIG_USER_ONLY
13054 mips32_op
= OPC_TLBP
;
13057 mips32_op
= OPC_TLBR
;
13060 mips32_op
= OPC_TLBWI
;
13063 mips32_op
= OPC_TLBWR
;
13066 mips32_op
= OPC_TLBINV
;
13069 mips32_op
= OPC_TLBINVF
;
13072 mips32_op
= OPC_WAIT
;
13075 mips32_op
= OPC_DERET
;
13078 mips32_op
= OPC_ERET
;
13080 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13083 goto pool32axf_invalid
;
13089 check_cp0_enabled(ctx
);
13091 TCGv t0
= tcg_temp_new();
13093 save_cpu_state(ctx
, 1);
13094 gen_helper_di(t0
, cpu_env
);
13095 gen_store_gpr(t0
, rs
);
13096 /* Stop translation as we may have switched the execution mode */
13097 ctx
->bstate
= BS_STOP
;
13102 check_cp0_enabled(ctx
);
13104 TCGv t0
= tcg_temp_new();
13106 save_cpu_state(ctx
, 1);
13107 gen_helper_ei(t0
, cpu_env
);
13108 gen_store_gpr(t0
, rs
);
13109 /* Stop translation as we may have switched the execution mode */
13110 ctx
->bstate
= BS_STOP
;
13115 goto pool32axf_invalid
;
13125 generate_exception(ctx
, EXCP_SYSCALL
);
13126 ctx
->bstate
= BS_STOP
;
13129 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13130 gen_helper_do_semihosting(cpu_env
);
13132 check_insn(ctx
, ISA_MIPS32
);
13133 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13134 generate_exception(ctx
, EXCP_RI
);
13136 generate_exception(ctx
, EXCP_DBp
);
13141 goto pool32axf_invalid
;
13145 switch (minor
& 3) {
13147 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13150 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13153 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13156 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13159 goto pool32axf_invalid
;
13163 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13166 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13169 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13172 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13175 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13178 goto pool32axf_invalid
;
13183 MIPS_INVAL("pool32axf");
13184 generate_exception(ctx
, EXCP_RI
);
13189 /* Values for microMIPS fmt field. Variable-width, depending on which
13190 formats the instruction supports. */
13209 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13211 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13212 uint32_t mips32_op
;
13214 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13215 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13216 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13218 switch (extension
) {
13219 case FLOAT_1BIT_FMT(CFC1
, 0):
13220 mips32_op
= OPC_CFC1
;
13222 case FLOAT_1BIT_FMT(CTC1
, 0):
13223 mips32_op
= OPC_CTC1
;
13225 case FLOAT_1BIT_FMT(MFC1
, 0):
13226 mips32_op
= OPC_MFC1
;
13228 case FLOAT_1BIT_FMT(MTC1
, 0):
13229 mips32_op
= OPC_MTC1
;
13231 case FLOAT_1BIT_FMT(MFHC1
, 0):
13232 mips32_op
= OPC_MFHC1
;
13234 case FLOAT_1BIT_FMT(MTHC1
, 0):
13235 mips32_op
= OPC_MTHC1
;
13237 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13240 /* Reciprocal square root */
13241 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13242 mips32_op
= OPC_RSQRT_S
;
13244 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13245 mips32_op
= OPC_RSQRT_D
;
13249 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13250 mips32_op
= OPC_SQRT_S
;
13252 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13253 mips32_op
= OPC_SQRT_D
;
13257 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13258 mips32_op
= OPC_RECIP_S
;
13260 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13261 mips32_op
= OPC_RECIP_D
;
13265 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13266 mips32_op
= OPC_FLOOR_L_S
;
13268 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13269 mips32_op
= OPC_FLOOR_L_D
;
13271 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13272 mips32_op
= OPC_FLOOR_W_S
;
13274 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13275 mips32_op
= OPC_FLOOR_W_D
;
13279 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13280 mips32_op
= OPC_CEIL_L_S
;
13282 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13283 mips32_op
= OPC_CEIL_L_D
;
13285 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13286 mips32_op
= OPC_CEIL_W_S
;
13288 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13289 mips32_op
= OPC_CEIL_W_D
;
13293 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13294 mips32_op
= OPC_TRUNC_L_S
;
13296 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13297 mips32_op
= OPC_TRUNC_L_D
;
13299 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13300 mips32_op
= OPC_TRUNC_W_S
;
13302 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13303 mips32_op
= OPC_TRUNC_W_D
;
13307 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13308 mips32_op
= OPC_ROUND_L_S
;
13310 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13311 mips32_op
= OPC_ROUND_L_D
;
13313 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13314 mips32_op
= OPC_ROUND_W_S
;
13316 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13317 mips32_op
= OPC_ROUND_W_D
;
13320 /* Integer to floating-point conversion */
13321 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13322 mips32_op
= OPC_CVT_L_S
;
13324 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13325 mips32_op
= OPC_CVT_L_D
;
13327 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13328 mips32_op
= OPC_CVT_W_S
;
13330 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13331 mips32_op
= OPC_CVT_W_D
;
13334 /* Paired-foo conversions */
13335 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13336 mips32_op
= OPC_CVT_S_PL
;
13338 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13339 mips32_op
= OPC_CVT_S_PU
;
13341 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13342 mips32_op
= OPC_CVT_PW_PS
;
13344 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13345 mips32_op
= OPC_CVT_PS_PW
;
13348 /* Floating-point moves */
13349 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13350 mips32_op
= OPC_MOV_S
;
13352 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13353 mips32_op
= OPC_MOV_D
;
13355 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13356 mips32_op
= OPC_MOV_PS
;
13359 /* Absolute value */
13360 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13361 mips32_op
= OPC_ABS_S
;
13363 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13364 mips32_op
= OPC_ABS_D
;
13366 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13367 mips32_op
= OPC_ABS_PS
;
13371 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13372 mips32_op
= OPC_NEG_S
;
13374 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13375 mips32_op
= OPC_NEG_D
;
13377 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13378 mips32_op
= OPC_NEG_PS
;
13381 /* Reciprocal square root step */
13382 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13383 mips32_op
= OPC_RSQRT1_S
;
13385 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13386 mips32_op
= OPC_RSQRT1_D
;
13388 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13389 mips32_op
= OPC_RSQRT1_PS
;
13392 /* Reciprocal step */
13393 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13394 mips32_op
= OPC_RECIP1_S
;
13396 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13397 mips32_op
= OPC_RECIP1_S
;
13399 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13400 mips32_op
= OPC_RECIP1_PS
;
13403 /* Conversions from double */
13404 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13405 mips32_op
= OPC_CVT_D_S
;
13407 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13408 mips32_op
= OPC_CVT_D_W
;
13410 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13411 mips32_op
= OPC_CVT_D_L
;
13414 /* Conversions from single */
13415 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13416 mips32_op
= OPC_CVT_S_D
;
13418 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13419 mips32_op
= OPC_CVT_S_W
;
13421 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13422 mips32_op
= OPC_CVT_S_L
;
13424 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13427 /* Conditional moves on floating-point codes */
13428 case COND_FLOAT_MOV(MOVT
, 0):
13429 case COND_FLOAT_MOV(MOVT
, 1):
13430 case COND_FLOAT_MOV(MOVT
, 2):
13431 case COND_FLOAT_MOV(MOVT
, 3):
13432 case COND_FLOAT_MOV(MOVT
, 4):
13433 case COND_FLOAT_MOV(MOVT
, 5):
13434 case COND_FLOAT_MOV(MOVT
, 6):
13435 case COND_FLOAT_MOV(MOVT
, 7):
13436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13437 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13439 case COND_FLOAT_MOV(MOVF
, 0):
13440 case COND_FLOAT_MOV(MOVF
, 1):
13441 case COND_FLOAT_MOV(MOVF
, 2):
13442 case COND_FLOAT_MOV(MOVF
, 3):
13443 case COND_FLOAT_MOV(MOVF
, 4):
13444 case COND_FLOAT_MOV(MOVF
, 5):
13445 case COND_FLOAT_MOV(MOVF
, 6):
13446 case COND_FLOAT_MOV(MOVF
, 7):
13447 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13448 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13451 MIPS_INVAL("pool32fxf");
13452 generate_exception(ctx
, EXCP_RI
);
13457 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13461 int rt
, rs
, rd
, rr
;
13463 uint32_t op
, minor
, mips32_op
;
13464 uint32_t cond
, fmt
, cc
;
13466 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13467 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13469 rt
= (ctx
->opcode
>> 21) & 0x1f;
13470 rs
= (ctx
->opcode
>> 16) & 0x1f;
13471 rd
= (ctx
->opcode
>> 11) & 0x1f;
13472 rr
= (ctx
->opcode
>> 6) & 0x1f;
13473 imm
= (int16_t) ctx
->opcode
;
13475 op
= (ctx
->opcode
>> 26) & 0x3f;
13478 minor
= ctx
->opcode
& 0x3f;
13481 minor
= (ctx
->opcode
>> 6) & 0xf;
13484 mips32_op
= OPC_SLL
;
13487 mips32_op
= OPC_SRA
;
13490 mips32_op
= OPC_SRL
;
13493 mips32_op
= OPC_ROTR
;
13495 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13498 check_insn(ctx
, ISA_MIPS32R6
);
13499 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13502 check_insn(ctx
, ISA_MIPS32R6
);
13503 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13506 goto pool32a_invalid
;
13510 minor
= (ctx
->opcode
>> 6) & 0xf;
13514 mips32_op
= OPC_ADD
;
13517 mips32_op
= OPC_ADDU
;
13520 mips32_op
= OPC_SUB
;
13523 mips32_op
= OPC_SUBU
;
13526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13527 mips32_op
= OPC_MUL
;
13529 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13533 mips32_op
= OPC_SLLV
;
13536 mips32_op
= OPC_SRLV
;
13539 mips32_op
= OPC_SRAV
;
13542 mips32_op
= OPC_ROTRV
;
13544 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13546 /* Logical operations */
13548 mips32_op
= OPC_AND
;
13551 mips32_op
= OPC_OR
;
13554 mips32_op
= OPC_NOR
;
13557 mips32_op
= OPC_XOR
;
13559 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13561 /* Set less than */
13563 mips32_op
= OPC_SLT
;
13566 mips32_op
= OPC_SLTU
;
13568 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13571 goto pool32a_invalid
;
13575 minor
= (ctx
->opcode
>> 6) & 0xf;
13577 /* Conditional moves */
13578 case MOVN
: /* MUL */
13579 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13581 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13584 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13587 case MOVZ
: /* MUH */
13588 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13590 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13593 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13597 check_insn(ctx
, ISA_MIPS32R6
);
13598 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13601 check_insn(ctx
, ISA_MIPS32R6
);
13602 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13604 case LWXS
: /* DIV */
13605 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13607 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13610 gen_ldxs(ctx
, rs
, rt
, rd
);
13614 check_insn(ctx
, ISA_MIPS32R6
);
13615 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13618 check_insn(ctx
, ISA_MIPS32R6
);
13619 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13622 check_insn(ctx
, ISA_MIPS32R6
);
13623 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13626 goto pool32a_invalid
;
13630 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13633 check_insn(ctx
, ISA_MIPS32R6
);
13634 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13635 extract32(ctx
->opcode
, 9, 2));
13638 check_insn(ctx
, ISA_MIPS32R6
);
13639 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13640 extract32(ctx
->opcode
, 9, 2));
13643 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13646 gen_pool32axf(env
, ctx
, rt
, rs
);
13649 generate_exception(ctx
, EXCP_BREAK
);
13653 MIPS_INVAL("pool32a");
13654 generate_exception(ctx
, EXCP_RI
);
13659 minor
= (ctx
->opcode
>> 12) & 0xf;
13662 check_cp0_enabled(ctx
);
13663 /* Treat as no-op. */
13667 /* COP2: Not implemented. */
13668 generate_exception_err(ctx
, EXCP_CpU
, 2);
13670 #ifdef TARGET_MIPS64
13673 check_insn(ctx
, ISA_MIPS3
);
13674 check_mips_64(ctx
);
13679 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13681 #ifdef TARGET_MIPS64
13684 check_insn(ctx
, ISA_MIPS3
);
13685 check_mips_64(ctx
);
13690 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13693 MIPS_INVAL("pool32b");
13694 generate_exception(ctx
, EXCP_RI
);
13699 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13700 minor
= ctx
->opcode
& 0x3f;
13701 check_cp1_enabled(ctx
);
13704 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13705 mips32_op
= OPC_ALNV_PS
;
13708 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13709 mips32_op
= OPC_MADD_S
;
13712 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13713 mips32_op
= OPC_MADD_D
;
13716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13717 mips32_op
= OPC_MADD_PS
;
13720 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13721 mips32_op
= OPC_MSUB_S
;
13724 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13725 mips32_op
= OPC_MSUB_D
;
13728 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13729 mips32_op
= OPC_MSUB_PS
;
13732 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13733 mips32_op
= OPC_NMADD_S
;
13736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13737 mips32_op
= OPC_NMADD_D
;
13740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13741 mips32_op
= OPC_NMADD_PS
;
13744 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13745 mips32_op
= OPC_NMSUB_S
;
13748 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13749 mips32_op
= OPC_NMSUB_D
;
13752 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13753 mips32_op
= OPC_NMSUB_PS
;
13755 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13757 case CABS_COND_FMT
:
13758 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13759 cond
= (ctx
->opcode
>> 6) & 0xf;
13760 cc
= (ctx
->opcode
>> 13) & 0x7;
13761 fmt
= (ctx
->opcode
>> 10) & 0x3;
13764 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13767 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13770 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13773 goto pool32f_invalid
;
13777 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13778 cond
= (ctx
->opcode
>> 6) & 0xf;
13779 cc
= (ctx
->opcode
>> 13) & 0x7;
13780 fmt
= (ctx
->opcode
>> 10) & 0x3;
13783 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13786 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13789 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13792 goto pool32f_invalid
;
13796 check_insn(ctx
, ISA_MIPS32R6
);
13797 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13800 check_insn(ctx
, ISA_MIPS32R6
);
13801 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13804 gen_pool32fxf(ctx
, rt
, rs
);
13808 switch ((ctx
->opcode
>> 6) & 0x7) {
13810 mips32_op
= OPC_PLL_PS
;
13813 mips32_op
= OPC_PLU_PS
;
13816 mips32_op
= OPC_PUL_PS
;
13819 mips32_op
= OPC_PUU_PS
;
13822 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13823 mips32_op
= OPC_CVT_PS_S
;
13825 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13828 goto pool32f_invalid
;
13832 check_insn(ctx
, ISA_MIPS32R6
);
13833 switch ((ctx
->opcode
>> 9) & 0x3) {
13835 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13838 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13841 goto pool32f_invalid
;
13846 switch ((ctx
->opcode
>> 6) & 0x7) {
13848 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13849 mips32_op
= OPC_LWXC1
;
13852 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13853 mips32_op
= OPC_SWXC1
;
13856 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13857 mips32_op
= OPC_LDXC1
;
13860 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13861 mips32_op
= OPC_SDXC1
;
13864 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13865 mips32_op
= OPC_LUXC1
;
13868 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13869 mips32_op
= OPC_SUXC1
;
13871 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13874 goto pool32f_invalid
;
13878 check_insn(ctx
, ISA_MIPS32R6
);
13879 switch ((ctx
->opcode
>> 9) & 0x3) {
13881 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13884 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13887 goto pool32f_invalid
;
13892 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13893 fmt
= (ctx
->opcode
>> 9) & 0x3;
13894 switch ((ctx
->opcode
>> 6) & 0x7) {
13898 mips32_op
= OPC_RSQRT2_S
;
13901 mips32_op
= OPC_RSQRT2_D
;
13904 mips32_op
= OPC_RSQRT2_PS
;
13907 goto pool32f_invalid
;
13913 mips32_op
= OPC_RECIP2_S
;
13916 mips32_op
= OPC_RECIP2_D
;
13919 mips32_op
= OPC_RECIP2_PS
;
13922 goto pool32f_invalid
;
13926 mips32_op
= OPC_ADDR_PS
;
13929 mips32_op
= OPC_MULR_PS
;
13931 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13934 goto pool32f_invalid
;
13938 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13939 cc
= (ctx
->opcode
>> 13) & 0x7;
13940 fmt
= (ctx
->opcode
>> 9) & 0x3;
13941 switch ((ctx
->opcode
>> 6) & 0x7) {
13942 case MOVF_FMT
: /* RINT_FMT */
13943 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13947 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13950 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13953 goto pool32f_invalid
;
13959 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13962 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13966 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13969 goto pool32f_invalid
;
13973 case MOVT_FMT
: /* CLASS_FMT */
13974 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13978 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
13981 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
13984 goto pool32f_invalid
;
13990 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
13993 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
13997 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14000 goto pool32f_invalid
;
14005 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14008 goto pool32f_invalid
;
14011 #define FINSN_3ARG_SDPS(prfx) \
14012 switch ((ctx->opcode >> 8) & 0x3) { \
14014 mips32_op = OPC_##prfx##_S; \
14017 mips32_op = OPC_##prfx##_D; \
14019 case FMT_SDPS_PS: \
14021 mips32_op = OPC_##prfx##_PS; \
14024 goto pool32f_invalid; \
14027 check_insn(ctx
, ISA_MIPS32R6
);
14028 switch ((ctx
->opcode
>> 9) & 0x3) {
14030 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14033 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14036 goto pool32f_invalid
;
14040 check_insn(ctx
, ISA_MIPS32R6
);
14041 switch ((ctx
->opcode
>> 9) & 0x3) {
14043 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14046 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14049 goto pool32f_invalid
;
14053 /* regular FP ops */
14054 switch ((ctx
->opcode
>> 6) & 0x3) {
14056 FINSN_3ARG_SDPS(ADD
);
14059 FINSN_3ARG_SDPS(SUB
);
14062 FINSN_3ARG_SDPS(MUL
);
14065 fmt
= (ctx
->opcode
>> 8) & 0x3;
14067 mips32_op
= OPC_DIV_D
;
14068 } else if (fmt
== 0) {
14069 mips32_op
= OPC_DIV_S
;
14071 goto pool32f_invalid
;
14075 goto pool32f_invalid
;
14080 switch ((ctx
->opcode
>> 6) & 0x7) {
14081 case MOVN_FMT
: /* SELNEZ_FMT */
14082 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14084 switch ((ctx
->opcode
>> 9) & 0x3) {
14086 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14089 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14092 goto pool32f_invalid
;
14096 FINSN_3ARG_SDPS(MOVN
);
14100 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14101 FINSN_3ARG_SDPS(MOVN
);
14103 case MOVZ_FMT
: /* SELEQZ_FMT */
14104 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14106 switch ((ctx
->opcode
>> 9) & 0x3) {
14108 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14111 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14114 goto pool32f_invalid
;
14118 FINSN_3ARG_SDPS(MOVZ
);
14122 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14123 FINSN_3ARG_SDPS(MOVZ
);
14126 check_insn(ctx
, ISA_MIPS32R6
);
14127 switch ((ctx
->opcode
>> 9) & 0x3) {
14129 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14132 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14135 goto pool32f_invalid
;
14139 check_insn(ctx
, ISA_MIPS32R6
);
14140 switch ((ctx
->opcode
>> 9) & 0x3) {
14142 mips32_op
= OPC_MADDF_S
;
14145 mips32_op
= OPC_MADDF_D
;
14148 goto pool32f_invalid
;
14152 check_insn(ctx
, ISA_MIPS32R6
);
14153 switch ((ctx
->opcode
>> 9) & 0x3) {
14155 mips32_op
= OPC_MSUBF_S
;
14158 mips32_op
= OPC_MSUBF_D
;
14161 goto pool32f_invalid
;
14165 goto pool32f_invalid
;
14169 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14173 MIPS_INVAL("pool32f");
14174 generate_exception(ctx
, EXCP_RI
);
14178 generate_exception_err(ctx
, EXCP_CpU
, 1);
14182 minor
= (ctx
->opcode
>> 21) & 0x1f;
14185 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14186 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14189 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14190 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14191 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14195 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14196 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14199 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14200 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14204 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14205 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14209 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14210 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14214 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14217 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14218 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14222 case TLTI
: /* BC1EQZC */
14223 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14225 check_cp1_enabled(ctx
);
14226 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14229 mips32_op
= OPC_TLTI
;
14233 case TGEI
: /* BC1NEZC */
14234 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14236 check_cp1_enabled(ctx
);
14237 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14240 mips32_op
= OPC_TGEI
;
14245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14246 mips32_op
= OPC_TLTIU
;
14249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14250 mips32_op
= OPC_TGEIU
;
14252 case TNEI
: /* SYNCI */
14253 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14255 /* Break the TB to be able to sync copied instructions
14257 ctx
->bstate
= BS_STOP
;
14260 mips32_op
= OPC_TNEI
;
14265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14266 mips32_op
= OPC_TEQI
;
14268 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14274 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14275 4, rs
, 0, imm
<< 1, 0);
14276 /* Compact branches don't have a delay slot, so just let
14277 the normal delay slot handling take us to the branch
14281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14282 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14285 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14286 /* Break the TB to be able to sync copied instructions
14288 ctx
->bstate
= BS_STOP
;
14292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14293 /* COP2: Not implemented. */
14294 generate_exception_err(ctx
, EXCP_CpU
, 2);
14297 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14298 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14301 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14302 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14306 mips32_op
= OPC_BC1FANY4
;
14309 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14310 mips32_op
= OPC_BC1TANY4
;
14313 check_insn(ctx
, ASE_MIPS3D
);
14316 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14317 check_cp1_enabled(ctx
);
14318 gen_compute_branch1(ctx
, mips32_op
,
14319 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14321 generate_exception_err(ctx
, EXCP_CpU
, 1);
14326 /* MIPS DSP: not implemented */
14329 MIPS_INVAL("pool32i");
14330 generate_exception(ctx
, EXCP_RI
);
14335 minor
= (ctx
->opcode
>> 12) & 0xf;
14336 offset
= sextract32(ctx
->opcode
, 0,
14337 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14340 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14341 mips32_op
= OPC_LWL
;
14344 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14345 mips32_op
= OPC_SWL
;
14348 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14349 mips32_op
= OPC_LWR
;
14352 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14353 mips32_op
= OPC_SWR
;
14355 #if defined(TARGET_MIPS64)
14357 check_insn(ctx
, ISA_MIPS3
);
14358 check_mips_64(ctx
);
14359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14360 mips32_op
= OPC_LDL
;
14363 check_insn(ctx
, ISA_MIPS3
);
14364 check_mips_64(ctx
);
14365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14366 mips32_op
= OPC_SDL
;
14369 check_insn(ctx
, ISA_MIPS3
);
14370 check_mips_64(ctx
);
14371 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14372 mips32_op
= OPC_LDR
;
14375 check_insn(ctx
, ISA_MIPS3
);
14376 check_mips_64(ctx
);
14377 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14378 mips32_op
= OPC_SDR
;
14381 check_insn(ctx
, ISA_MIPS3
);
14382 check_mips_64(ctx
);
14383 mips32_op
= OPC_LWU
;
14386 check_insn(ctx
, ISA_MIPS3
);
14387 check_mips_64(ctx
);
14388 mips32_op
= OPC_LLD
;
14392 mips32_op
= OPC_LL
;
14395 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14398 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14401 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14403 #if defined(TARGET_MIPS64)
14405 check_insn(ctx
, ISA_MIPS3
);
14406 check_mips_64(ctx
);
14407 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14411 /* Treat as no-op */
14412 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14413 /* hint codes 24-31 are reserved and signal RI */
14414 generate_exception(ctx
, EXCP_RI
);
14418 MIPS_INVAL("pool32c");
14419 generate_exception(ctx
, EXCP_RI
);
14423 case ADDI32
: /* AUI, LUI */
14424 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14426 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14429 mips32_op
= OPC_ADDI
;
14434 mips32_op
= OPC_ADDIU
;
14436 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14439 /* Logical operations */
14441 mips32_op
= OPC_ORI
;
14444 mips32_op
= OPC_XORI
;
14447 mips32_op
= OPC_ANDI
;
14449 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14452 /* Set less than immediate */
14454 mips32_op
= OPC_SLTI
;
14457 mips32_op
= OPC_SLTIU
;
14459 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14462 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14463 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14464 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14465 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14467 case JALS32
: /* BOVC, BEQC, BEQZALC */
14468 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14471 mips32_op
= OPC_BOVC
;
14472 } else if (rs
< rt
&& rs
== 0) {
14474 mips32_op
= OPC_BEQZALC
;
14477 mips32_op
= OPC_BEQC
;
14479 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14482 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14483 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14484 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14487 case BEQ32
: /* BC */
14488 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14490 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14491 sextract32(ctx
->opcode
<< 1, 0, 27));
14494 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14497 case BNE32
: /* BALC */
14498 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14500 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14501 sextract32(ctx
->opcode
<< 1, 0, 27));
14504 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14507 case J32
: /* BGTZC, BLTZC, BLTC */
14508 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14509 if (rs
== 0 && rt
!= 0) {
14511 mips32_op
= OPC_BGTZC
;
14512 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14514 mips32_op
= OPC_BLTZC
;
14517 mips32_op
= OPC_BLTC
;
14519 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14522 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14523 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14526 case JAL32
: /* BLEZC, BGEZC, BGEC */
14527 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14528 if (rs
== 0 && rt
!= 0) {
14530 mips32_op
= OPC_BLEZC
;
14531 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14533 mips32_op
= OPC_BGEZC
;
14536 mips32_op
= OPC_BGEC
;
14538 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14541 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14542 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14543 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14546 /* Floating point (COP1) */
14548 mips32_op
= OPC_LWC1
;
14551 mips32_op
= OPC_LDC1
;
14554 mips32_op
= OPC_SWC1
;
14557 mips32_op
= OPC_SDC1
;
14559 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14561 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14562 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14563 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14564 switch ((ctx
->opcode
>> 16) & 0x1f) {
14565 case ADDIUPC_00
... ADDIUPC_07
:
14566 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14569 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14572 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14574 case LWPC_08
... LWPC_0F
:
14575 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14578 generate_exception(ctx
, EXCP_RI
);
14583 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14584 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14586 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14589 case BNVC
: /* BNEC, BNEZALC */
14590 check_insn(ctx
, ISA_MIPS32R6
);
14593 mips32_op
= OPC_BNVC
;
14594 } else if (rs
< rt
&& rs
== 0) {
14596 mips32_op
= OPC_BNEZALC
;
14599 mips32_op
= OPC_BNEC
;
14601 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14603 case R6_BNEZC
: /* JIALC */
14604 check_insn(ctx
, ISA_MIPS32R6
);
14607 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14608 sextract32(ctx
->opcode
<< 1, 0, 22));
14611 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14614 case R6_BEQZC
: /* JIC */
14615 check_insn(ctx
, ISA_MIPS32R6
);
14618 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14619 sextract32(ctx
->opcode
<< 1, 0, 22));
14622 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14625 case BLEZALC
: /* BGEZALC, BGEUC */
14626 check_insn(ctx
, ISA_MIPS32R6
);
14627 if (rs
== 0 && rt
!= 0) {
14629 mips32_op
= OPC_BLEZALC
;
14630 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14632 mips32_op
= OPC_BGEZALC
;
14635 mips32_op
= OPC_BGEUC
;
14637 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14639 case BGTZALC
: /* BLTZALC, BLTUC */
14640 check_insn(ctx
, ISA_MIPS32R6
);
14641 if (rs
== 0 && rt
!= 0) {
14643 mips32_op
= OPC_BGTZALC
;
14644 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14646 mips32_op
= OPC_BLTZALC
;
14649 mips32_op
= OPC_BLTUC
;
14651 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14653 /* Loads and stores */
14655 mips32_op
= OPC_LB
;
14658 mips32_op
= OPC_LBU
;
14661 mips32_op
= OPC_LH
;
14664 mips32_op
= OPC_LHU
;
14667 mips32_op
= OPC_LW
;
14669 #ifdef TARGET_MIPS64
14671 check_insn(ctx
, ISA_MIPS3
);
14672 check_mips_64(ctx
);
14673 mips32_op
= OPC_LD
;
14676 check_insn(ctx
, ISA_MIPS3
);
14677 check_mips_64(ctx
);
14678 mips32_op
= OPC_SD
;
14682 mips32_op
= OPC_SB
;
14685 mips32_op
= OPC_SH
;
14688 mips32_op
= OPC_SW
;
14691 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14694 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14697 generate_exception(ctx
, EXCP_RI
);
14702 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
14706 /* make sure instructions are on a halfword boundary */
14707 if (ctx
->pc
& 0x1) {
14708 env
->CP0_BadVAddr
= ctx
->pc
;
14709 generate_exception(ctx
, EXCP_AdEL
);
14710 ctx
->bstate
= BS_STOP
;
14714 op
= (ctx
->opcode
>> 10) & 0x3f;
14715 /* Enforce properly-sized instructions in a delay slot */
14716 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
14717 switch (op
& 0x7) { /* MSB-3..MSB-5 */
14719 /* POOL32A, POOL32B, POOL32I, POOL32C */
14721 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
14723 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
14725 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
14727 /* LB32, LH32, LWC132, LDC132, LW32 */
14728 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
14729 generate_exception(ctx
, EXCP_RI
);
14730 /* Just stop translation; the user is confused. */
14731 ctx
->bstate
= BS_STOP
;
14736 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
14738 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
14740 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
14741 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
14742 generate_exception(ctx
, EXCP_RI
);
14743 /* Just stop translation; the user is confused. */
14744 ctx
->bstate
= BS_STOP
;
14754 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14755 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14756 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14759 switch (ctx
->opcode
& 0x1) {
14767 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14768 /* In the Release 6 the register number location in
14769 * the instruction encoding has changed.
14771 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14773 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14779 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14780 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14781 int amount
= (ctx
->opcode
>> 1) & 0x7;
14783 amount
= amount
== 0 ? 8 : amount
;
14785 switch (ctx
->opcode
& 0x1) {
14794 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14798 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14799 gen_pool16c_r6_insn(ctx
);
14801 gen_pool16c_insn(ctx
);
14806 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14807 int rb
= 28; /* GP */
14808 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14810 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14814 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14815 if (ctx
->opcode
& 1) {
14816 generate_exception(ctx
, EXCP_RI
);
14819 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14820 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14821 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14822 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14827 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14828 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14829 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14830 offset
= (offset
== 0xf ? -1 : offset
);
14832 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14837 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14838 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14839 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14841 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14846 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14847 int rb
= 29; /* SP */
14848 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14850 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14855 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14856 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14857 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14859 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14864 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14865 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14866 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14868 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14873 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14874 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14875 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14877 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14882 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14883 int rb
= 29; /* SP */
14884 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14886 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14891 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14892 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14893 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14895 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14900 int rd
= uMIPS_RD5(ctx
->opcode
);
14901 int rs
= uMIPS_RS5(ctx
->opcode
);
14903 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14910 switch (ctx
->opcode
& 0x1) {
14920 switch (ctx
->opcode
& 0x1) {
14925 gen_addiur1sp(ctx
);
14929 case B16
: /* BC16 */
14930 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14931 sextract32(ctx
->opcode
, 0, 10) << 1,
14932 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14934 case BNEZ16
: /* BNEZC16 */
14935 case BEQZ16
: /* BEQZC16 */
14936 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14937 mmreg(uMIPS_RD(ctx
->opcode
)),
14938 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14939 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14944 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
14945 int imm
= ZIMM(ctx
->opcode
, 0, 7);
14947 imm
= (imm
== 0x7f ? -1 : imm
);
14948 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
14954 generate_exception(ctx
, EXCP_RI
);
14957 decode_micromips32_opc(env
, ctx
);
14964 /* SmartMIPS extension to MIPS32 */
14966 #if defined(TARGET_MIPS64)
14968 /* MDMX extension to MIPS64 */
14972 /* MIPSDSP functions. */
14973 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
14974 int rd
, int base
, int offset
)
14979 t0
= tcg_temp_new();
14982 gen_load_gpr(t0
, offset
);
14983 } else if (offset
== 0) {
14984 gen_load_gpr(t0
, base
);
14986 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
14991 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
14992 gen_store_gpr(t0
, rd
);
14995 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
14996 gen_store_gpr(t0
, rd
);
14999 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15000 gen_store_gpr(t0
, rd
);
15002 #if defined(TARGET_MIPS64)
15004 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15005 gen_store_gpr(t0
, rd
);
15012 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15013 int ret
, int v1
, int v2
)
15019 /* Treat as NOP. */
15023 v1_t
= tcg_temp_new();
15024 v2_t
= tcg_temp_new();
15026 gen_load_gpr(v1_t
, v1
);
15027 gen_load_gpr(v2_t
, v2
);
15030 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15031 case OPC_MULT_G_2E
:
15035 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15037 case OPC_ADDUH_R_QB
:
15038 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15041 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15043 case OPC_ADDQH_R_PH
:
15044 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15047 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15049 case OPC_ADDQH_R_W
:
15050 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15053 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15055 case OPC_SUBUH_R_QB
:
15056 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15059 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15061 case OPC_SUBQH_R_PH
:
15062 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15065 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15067 case OPC_SUBQH_R_W
:
15068 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15072 case OPC_ABSQ_S_PH_DSP
:
15074 case OPC_ABSQ_S_QB
:
15076 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15078 case OPC_ABSQ_S_PH
:
15080 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15084 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15086 case OPC_PRECEQ_W_PHL
:
15088 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15089 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15091 case OPC_PRECEQ_W_PHR
:
15093 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15094 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15095 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15097 case OPC_PRECEQU_PH_QBL
:
15099 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15101 case OPC_PRECEQU_PH_QBR
:
15103 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15105 case OPC_PRECEQU_PH_QBLA
:
15107 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15109 case OPC_PRECEQU_PH_QBRA
:
15111 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15113 case OPC_PRECEU_PH_QBL
:
15115 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15117 case OPC_PRECEU_PH_QBR
:
15119 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15121 case OPC_PRECEU_PH_QBLA
:
15123 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15125 case OPC_PRECEU_PH_QBRA
:
15127 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15131 case OPC_ADDU_QB_DSP
:
15135 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15137 case OPC_ADDQ_S_PH
:
15139 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15143 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15147 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15149 case OPC_ADDU_S_QB
:
15151 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15155 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15157 case OPC_ADDU_S_PH
:
15159 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15163 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15165 case OPC_SUBQ_S_PH
:
15167 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15171 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15175 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15177 case OPC_SUBU_S_QB
:
15179 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15183 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15185 case OPC_SUBU_S_PH
:
15187 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15191 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15195 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15199 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15201 case OPC_RADDU_W_QB
:
15203 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15207 case OPC_CMPU_EQ_QB_DSP
:
15209 case OPC_PRECR_QB_PH
:
15211 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15213 case OPC_PRECRQ_QB_PH
:
15215 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15217 case OPC_PRECR_SRA_PH_W
:
15220 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15221 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15223 tcg_temp_free_i32(sa_t
);
15226 case OPC_PRECR_SRA_R_PH_W
:
15229 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15230 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15232 tcg_temp_free_i32(sa_t
);
15235 case OPC_PRECRQ_PH_W
:
15237 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15239 case OPC_PRECRQ_RS_PH_W
:
15241 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15243 case OPC_PRECRQU_S_QB_PH
:
15245 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15249 #ifdef TARGET_MIPS64
15250 case OPC_ABSQ_S_QH_DSP
:
15252 case OPC_PRECEQ_L_PWL
:
15254 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15256 case OPC_PRECEQ_L_PWR
:
15258 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15260 case OPC_PRECEQ_PW_QHL
:
15262 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15264 case OPC_PRECEQ_PW_QHR
:
15266 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15268 case OPC_PRECEQ_PW_QHLA
:
15270 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15272 case OPC_PRECEQ_PW_QHRA
:
15274 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15276 case OPC_PRECEQU_QH_OBL
:
15278 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15280 case OPC_PRECEQU_QH_OBR
:
15282 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15284 case OPC_PRECEQU_QH_OBLA
:
15286 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15288 case OPC_PRECEQU_QH_OBRA
:
15290 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15292 case OPC_PRECEU_QH_OBL
:
15294 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15296 case OPC_PRECEU_QH_OBR
:
15298 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15300 case OPC_PRECEU_QH_OBLA
:
15302 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15304 case OPC_PRECEU_QH_OBRA
:
15306 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15308 case OPC_ABSQ_S_OB
:
15310 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15312 case OPC_ABSQ_S_PW
:
15314 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15316 case OPC_ABSQ_S_QH
:
15318 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15322 case OPC_ADDU_OB_DSP
:
15324 case OPC_RADDU_L_OB
:
15326 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15330 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15332 case OPC_SUBQ_S_PW
:
15334 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15338 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15340 case OPC_SUBQ_S_QH
:
15342 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15346 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15348 case OPC_SUBU_S_OB
:
15350 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15354 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 case OPC_SUBU_S_QH
:
15358 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15362 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15364 case OPC_SUBUH_R_OB
:
15366 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15370 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15372 case OPC_ADDQ_S_PW
:
15374 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15378 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15380 case OPC_ADDQ_S_QH
:
15382 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15386 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15388 case OPC_ADDU_S_OB
:
15390 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15394 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15396 case OPC_ADDU_S_QH
:
15398 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15402 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15404 case OPC_ADDUH_R_OB
:
15406 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15410 case OPC_CMPU_EQ_OB_DSP
:
15412 case OPC_PRECR_OB_QH
:
15414 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15416 case OPC_PRECR_SRA_QH_PW
:
15419 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15420 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15421 tcg_temp_free_i32(ret_t
);
15424 case OPC_PRECR_SRA_R_QH_PW
:
15427 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15428 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15429 tcg_temp_free_i32(sa_v
);
15432 case OPC_PRECRQ_OB_QH
:
15434 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15436 case OPC_PRECRQ_PW_L
:
15438 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15440 case OPC_PRECRQ_QH_PW
:
15442 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15444 case OPC_PRECRQ_RS_QH_PW
:
15446 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15448 case OPC_PRECRQU_S_OB_QH
:
15450 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15457 tcg_temp_free(v1_t
);
15458 tcg_temp_free(v2_t
);
15461 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15462 int ret
, int v1
, int v2
)
15470 /* Treat as NOP. */
15474 t0
= tcg_temp_new();
15475 v1_t
= tcg_temp_new();
15476 v2_t
= tcg_temp_new();
15478 tcg_gen_movi_tl(t0
, v1
);
15479 gen_load_gpr(v1_t
, v1
);
15480 gen_load_gpr(v2_t
, v2
);
15483 case OPC_SHLL_QB_DSP
:
15485 op2
= MASK_SHLL_QB(ctx
->opcode
);
15489 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15493 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15497 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15501 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15503 case OPC_SHLL_S_PH
:
15505 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15507 case OPC_SHLLV_S_PH
:
15509 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15513 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15515 case OPC_SHLLV_S_W
:
15517 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15521 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15525 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15529 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15533 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15537 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15539 case OPC_SHRA_R_QB
:
15541 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15545 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15547 case OPC_SHRAV_R_QB
:
15549 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15553 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15555 case OPC_SHRA_R_PH
:
15557 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15561 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15563 case OPC_SHRAV_R_PH
:
15565 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15569 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15571 case OPC_SHRAV_R_W
:
15573 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15575 default: /* Invalid */
15576 MIPS_INVAL("MASK SHLL.QB");
15577 generate_exception(ctx
, EXCP_RI
);
15582 #ifdef TARGET_MIPS64
15583 case OPC_SHLL_OB_DSP
:
15584 op2
= MASK_SHLL_OB(ctx
->opcode
);
15588 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15592 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15594 case OPC_SHLL_S_PW
:
15596 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15598 case OPC_SHLLV_S_PW
:
15600 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15604 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15608 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15612 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15616 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15618 case OPC_SHLL_S_QH
:
15620 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15622 case OPC_SHLLV_S_QH
:
15624 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15628 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15632 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15634 case OPC_SHRA_R_OB
:
15636 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15638 case OPC_SHRAV_R_OB
:
15640 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15644 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15648 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15650 case OPC_SHRA_R_PW
:
15652 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15654 case OPC_SHRAV_R_PW
:
15656 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15660 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15664 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15666 case OPC_SHRA_R_QH
:
15668 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15670 case OPC_SHRAV_R_QH
:
15672 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15676 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15680 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15684 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15688 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15690 default: /* Invalid */
15691 MIPS_INVAL("MASK SHLL.OB");
15692 generate_exception(ctx
, EXCP_RI
);
15700 tcg_temp_free(v1_t
);
15701 tcg_temp_free(v2_t
);
15704 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15705 int ret
, int v1
, int v2
, int check_ret
)
15711 if ((ret
== 0) && (check_ret
== 1)) {
15712 /* Treat as NOP. */
15716 t0
= tcg_temp_new_i32();
15717 v1_t
= tcg_temp_new();
15718 v2_t
= tcg_temp_new();
15720 tcg_gen_movi_i32(t0
, ret
);
15721 gen_load_gpr(v1_t
, v1
);
15722 gen_load_gpr(v2_t
, v2
);
15725 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15726 * the same mask and op1. */
15727 case OPC_MULT_G_2E
:
15731 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15734 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15737 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15739 case OPC_MULQ_RS_W
:
15740 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15744 case OPC_DPA_W_PH_DSP
:
15746 case OPC_DPAU_H_QBL
:
15748 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15750 case OPC_DPAU_H_QBR
:
15752 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15754 case OPC_DPSU_H_QBL
:
15756 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15758 case OPC_DPSU_H_QBR
:
15760 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15764 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15766 case OPC_DPAX_W_PH
:
15768 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15770 case OPC_DPAQ_S_W_PH
:
15772 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15774 case OPC_DPAQX_S_W_PH
:
15776 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15778 case OPC_DPAQX_SA_W_PH
:
15780 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15784 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15786 case OPC_DPSX_W_PH
:
15788 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15790 case OPC_DPSQ_S_W_PH
:
15792 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15794 case OPC_DPSQX_S_W_PH
:
15796 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15798 case OPC_DPSQX_SA_W_PH
:
15800 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15802 case OPC_MULSAQ_S_W_PH
:
15804 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15806 case OPC_DPAQ_SA_L_W
:
15808 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15810 case OPC_DPSQ_SA_L_W
:
15812 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15814 case OPC_MAQ_S_W_PHL
:
15816 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15818 case OPC_MAQ_S_W_PHR
:
15820 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15822 case OPC_MAQ_SA_W_PHL
:
15824 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15826 case OPC_MAQ_SA_W_PHR
:
15828 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15830 case OPC_MULSA_W_PH
:
15832 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15836 #ifdef TARGET_MIPS64
15837 case OPC_DPAQ_W_QH_DSP
:
15839 int ac
= ret
& 0x03;
15840 tcg_gen_movi_i32(t0
, ac
);
15845 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15849 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15853 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15857 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15861 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15863 case OPC_DPAQ_S_W_QH
:
15865 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15867 case OPC_DPAQ_SA_L_PW
:
15869 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15871 case OPC_DPAU_H_OBL
:
15873 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15875 case OPC_DPAU_H_OBR
:
15877 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15881 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15883 case OPC_DPSQ_S_W_QH
:
15885 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15887 case OPC_DPSQ_SA_L_PW
:
15889 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15891 case OPC_DPSU_H_OBL
:
15893 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15895 case OPC_DPSU_H_OBR
:
15897 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15899 case OPC_MAQ_S_L_PWL
:
15901 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15903 case OPC_MAQ_S_L_PWR
:
15905 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15907 case OPC_MAQ_S_W_QHLL
:
15909 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15911 case OPC_MAQ_SA_W_QHLL
:
15913 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15915 case OPC_MAQ_S_W_QHLR
:
15917 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15919 case OPC_MAQ_SA_W_QHLR
:
15921 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15923 case OPC_MAQ_S_W_QHRL
:
15925 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15927 case OPC_MAQ_SA_W_QHRL
:
15929 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15931 case OPC_MAQ_S_W_QHRR
:
15933 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15935 case OPC_MAQ_SA_W_QHRR
:
15937 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15939 case OPC_MULSAQ_S_L_PW
:
15941 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15943 case OPC_MULSAQ_S_W_QH
:
15945 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15951 case OPC_ADDU_QB_DSP
:
15953 case OPC_MULEU_S_PH_QBL
:
15955 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15957 case OPC_MULEU_S_PH_QBR
:
15959 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15961 case OPC_MULQ_RS_PH
:
15963 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15965 case OPC_MULEQ_S_W_PHL
:
15967 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15969 case OPC_MULEQ_S_W_PHR
:
15971 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15973 case OPC_MULQ_S_PH
:
15975 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15979 #ifdef TARGET_MIPS64
15980 case OPC_ADDU_OB_DSP
:
15982 case OPC_MULEQ_S_PW_QHL
:
15984 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15986 case OPC_MULEQ_S_PW_QHR
:
15988 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15990 case OPC_MULEU_S_QH_OBL
:
15992 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15994 case OPC_MULEU_S_QH_OBR
:
15996 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15998 case OPC_MULQ_RS_QH
:
16000 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16007 tcg_temp_free_i32(t0
);
16008 tcg_temp_free(v1_t
);
16009 tcg_temp_free(v2_t
);
16012 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16020 /* Treat as NOP. */
16024 t0
= tcg_temp_new();
16025 val_t
= tcg_temp_new();
16026 gen_load_gpr(val_t
, val
);
16029 case OPC_ABSQ_S_PH_DSP
:
16033 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16038 target_long result
;
16039 imm
= (ctx
->opcode
>> 16) & 0xFF;
16040 result
= (uint32_t)imm
<< 24 |
16041 (uint32_t)imm
<< 16 |
16042 (uint32_t)imm
<< 8 |
16044 result
= (int32_t)result
;
16045 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16050 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16051 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16052 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16053 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16054 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16055 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16060 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16061 imm
= (int16_t)(imm
<< 6) >> 6;
16062 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16063 (target_long
)((int32_t)imm
<< 16 | \
16069 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16070 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16071 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16072 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16076 #ifdef TARGET_MIPS64
16077 case OPC_ABSQ_S_QH_DSP
:
16084 imm
= (ctx
->opcode
>> 16) & 0xFF;
16085 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16086 temp
= (temp
<< 16) | temp
;
16087 temp
= (temp
<< 32) | temp
;
16088 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16096 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16097 imm
= (int16_t)(imm
<< 6) >> 6;
16098 temp
= ((target_long
)imm
<< 32) \
16099 | ((target_long
)imm
& 0xFFFFFFFF);
16100 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16108 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16109 imm
= (int16_t)(imm
<< 6) >> 6;
16111 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16112 ((uint64_t)(uint16_t)imm
<< 32) |
16113 ((uint64_t)(uint16_t)imm
<< 16) |
16114 (uint64_t)(uint16_t)imm
;
16115 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16120 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16121 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16122 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16123 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16124 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16125 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16126 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16130 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16131 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16132 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16136 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16137 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16138 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16139 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16140 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16147 tcg_temp_free(val_t
);
16150 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16151 uint32_t op1
, uint32_t op2
,
16152 int ret
, int v1
, int v2
, int check_ret
)
16158 if ((ret
== 0) && (check_ret
== 1)) {
16159 /* Treat as NOP. */
16163 t1
= tcg_temp_new();
16164 v1_t
= tcg_temp_new();
16165 v2_t
= tcg_temp_new();
16167 gen_load_gpr(v1_t
, v1
);
16168 gen_load_gpr(v2_t
, v2
);
16171 case OPC_CMPU_EQ_QB_DSP
:
16173 case OPC_CMPU_EQ_QB
:
16175 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16177 case OPC_CMPU_LT_QB
:
16179 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16181 case OPC_CMPU_LE_QB
:
16183 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16185 case OPC_CMPGU_EQ_QB
:
16187 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16189 case OPC_CMPGU_LT_QB
:
16191 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16193 case OPC_CMPGU_LE_QB
:
16195 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16197 case OPC_CMPGDU_EQ_QB
:
16199 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16200 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16201 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16202 tcg_gen_shli_tl(t1
, t1
, 24);
16203 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16205 case OPC_CMPGDU_LT_QB
:
16207 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16208 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16209 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16210 tcg_gen_shli_tl(t1
, t1
, 24);
16211 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16213 case OPC_CMPGDU_LE_QB
:
16215 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16216 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16217 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16218 tcg_gen_shli_tl(t1
, t1
, 24);
16219 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16221 case OPC_CMP_EQ_PH
:
16223 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16225 case OPC_CMP_LT_PH
:
16227 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16229 case OPC_CMP_LE_PH
:
16231 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16235 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16239 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16241 case OPC_PACKRL_PH
:
16243 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16247 #ifdef TARGET_MIPS64
16248 case OPC_CMPU_EQ_OB_DSP
:
16250 case OPC_CMP_EQ_PW
:
16252 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16254 case OPC_CMP_LT_PW
:
16256 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16258 case OPC_CMP_LE_PW
:
16260 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16262 case OPC_CMP_EQ_QH
:
16264 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16266 case OPC_CMP_LT_QH
:
16268 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16270 case OPC_CMP_LE_QH
:
16272 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16274 case OPC_CMPGDU_EQ_OB
:
16276 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16278 case OPC_CMPGDU_LT_OB
:
16280 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16282 case OPC_CMPGDU_LE_OB
:
16284 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16286 case OPC_CMPGU_EQ_OB
:
16288 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16290 case OPC_CMPGU_LT_OB
:
16292 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16294 case OPC_CMPGU_LE_OB
:
16296 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16298 case OPC_CMPU_EQ_OB
:
16300 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16302 case OPC_CMPU_LT_OB
:
16304 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16306 case OPC_CMPU_LE_OB
:
16308 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16310 case OPC_PACKRL_PW
:
16312 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16316 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16320 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16324 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16332 tcg_temp_free(v1_t
);
16333 tcg_temp_free(v2_t
);
16336 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16337 uint32_t op1
, int rt
, int rs
, int sa
)
16344 /* Treat as NOP. */
16348 t0
= tcg_temp_new();
16349 gen_load_gpr(t0
, rs
);
16352 case OPC_APPEND_DSP
:
16353 switch (MASK_APPEND(ctx
->opcode
)) {
16356 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16358 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16362 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16363 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16364 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16365 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16367 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16371 if (sa
!= 0 && sa
!= 2) {
16372 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16373 tcg_gen_ext32u_tl(t0
, t0
);
16374 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16375 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16377 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16379 default: /* Invalid */
16380 MIPS_INVAL("MASK APPEND");
16381 generate_exception(ctx
, EXCP_RI
);
16385 #ifdef TARGET_MIPS64
16386 case OPC_DAPPEND_DSP
:
16387 switch (MASK_DAPPEND(ctx
->opcode
)) {
16390 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16394 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16395 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16396 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16400 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16401 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16402 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16407 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16408 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16409 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16410 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16413 default: /* Invalid */
16414 MIPS_INVAL("MASK DAPPEND");
16415 generate_exception(ctx
, EXCP_RI
);
16424 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16425 int ret
, int v1
, int v2
, int check_ret
)
16434 if ((ret
== 0) && (check_ret
== 1)) {
16435 /* Treat as NOP. */
16439 t0
= tcg_temp_new();
16440 t1
= tcg_temp_new();
16441 v1_t
= tcg_temp_new();
16442 v2_t
= tcg_temp_new();
16444 gen_load_gpr(v1_t
, v1
);
16445 gen_load_gpr(v2_t
, v2
);
16448 case OPC_EXTR_W_DSP
:
16452 tcg_gen_movi_tl(t0
, v2
);
16453 tcg_gen_movi_tl(t1
, v1
);
16454 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16457 tcg_gen_movi_tl(t0
, v2
);
16458 tcg_gen_movi_tl(t1
, v1
);
16459 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16461 case OPC_EXTR_RS_W
:
16462 tcg_gen_movi_tl(t0
, v2
);
16463 tcg_gen_movi_tl(t1
, v1
);
16464 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16467 tcg_gen_movi_tl(t0
, v2
);
16468 tcg_gen_movi_tl(t1
, v1
);
16469 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16471 case OPC_EXTRV_S_H
:
16472 tcg_gen_movi_tl(t0
, v2
);
16473 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16476 tcg_gen_movi_tl(t0
, v2
);
16477 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16479 case OPC_EXTRV_R_W
:
16480 tcg_gen_movi_tl(t0
, v2
);
16481 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16483 case OPC_EXTRV_RS_W
:
16484 tcg_gen_movi_tl(t0
, v2
);
16485 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16488 tcg_gen_movi_tl(t0
, v2
);
16489 tcg_gen_movi_tl(t1
, v1
);
16490 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16493 tcg_gen_movi_tl(t0
, v2
);
16494 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16497 tcg_gen_movi_tl(t0
, v2
);
16498 tcg_gen_movi_tl(t1
, v1
);
16499 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16502 tcg_gen_movi_tl(t0
, v2
);
16503 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16506 imm
= (ctx
->opcode
>> 20) & 0x3F;
16507 tcg_gen_movi_tl(t0
, ret
);
16508 tcg_gen_movi_tl(t1
, imm
);
16509 gen_helper_shilo(t0
, t1
, cpu_env
);
16512 tcg_gen_movi_tl(t0
, ret
);
16513 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16516 tcg_gen_movi_tl(t0
, ret
);
16517 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16520 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16521 tcg_gen_movi_tl(t0
, imm
);
16522 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16525 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16526 tcg_gen_movi_tl(t0
, imm
);
16527 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16531 #ifdef TARGET_MIPS64
16532 case OPC_DEXTR_W_DSP
:
16536 tcg_gen_movi_tl(t0
, ret
);
16537 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16541 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16542 int ac
= (ctx
->opcode
>> 11) & 0x03;
16543 tcg_gen_movi_tl(t0
, shift
);
16544 tcg_gen_movi_tl(t1
, ac
);
16545 gen_helper_dshilo(t0
, t1
, cpu_env
);
16550 int ac
= (ctx
->opcode
>> 11) & 0x03;
16551 tcg_gen_movi_tl(t0
, ac
);
16552 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16556 tcg_gen_movi_tl(t0
, v2
);
16557 tcg_gen_movi_tl(t1
, v1
);
16559 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16562 tcg_gen_movi_tl(t0
, v2
);
16563 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16566 tcg_gen_movi_tl(t0
, v2
);
16567 tcg_gen_movi_tl(t1
, v1
);
16568 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16571 tcg_gen_movi_tl(t0
, v2
);
16572 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16575 tcg_gen_movi_tl(t0
, v2
);
16576 tcg_gen_movi_tl(t1
, v1
);
16577 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16579 case OPC_DEXTR_R_L
:
16580 tcg_gen_movi_tl(t0
, v2
);
16581 tcg_gen_movi_tl(t1
, v1
);
16582 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16584 case OPC_DEXTR_RS_L
:
16585 tcg_gen_movi_tl(t0
, v2
);
16586 tcg_gen_movi_tl(t1
, v1
);
16587 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16590 tcg_gen_movi_tl(t0
, v2
);
16591 tcg_gen_movi_tl(t1
, v1
);
16592 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16594 case OPC_DEXTR_R_W
:
16595 tcg_gen_movi_tl(t0
, v2
);
16596 tcg_gen_movi_tl(t1
, v1
);
16597 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16599 case OPC_DEXTR_RS_W
:
16600 tcg_gen_movi_tl(t0
, v2
);
16601 tcg_gen_movi_tl(t1
, v1
);
16602 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16604 case OPC_DEXTR_S_H
:
16605 tcg_gen_movi_tl(t0
, v2
);
16606 tcg_gen_movi_tl(t1
, v1
);
16607 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16609 case OPC_DEXTRV_S_H
:
16610 tcg_gen_movi_tl(t0
, v2
);
16611 tcg_gen_movi_tl(t1
, v1
);
16612 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16615 tcg_gen_movi_tl(t0
, v2
);
16616 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16618 case OPC_DEXTRV_R_L
:
16619 tcg_gen_movi_tl(t0
, v2
);
16620 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16622 case OPC_DEXTRV_RS_L
:
16623 tcg_gen_movi_tl(t0
, v2
);
16624 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16627 tcg_gen_movi_tl(t0
, v2
);
16628 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16630 case OPC_DEXTRV_R_W
:
16631 tcg_gen_movi_tl(t0
, v2
);
16632 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16634 case OPC_DEXTRV_RS_W
:
16635 tcg_gen_movi_tl(t0
, v2
);
16636 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16645 tcg_temp_free(v1_t
);
16646 tcg_temp_free(v2_t
);
16649 /* End MIPSDSP functions. */
16651 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16653 int rs
, rt
, rd
, sa
;
16656 rs
= (ctx
->opcode
>> 21) & 0x1f;
16657 rt
= (ctx
->opcode
>> 16) & 0x1f;
16658 rd
= (ctx
->opcode
>> 11) & 0x1f;
16659 sa
= (ctx
->opcode
>> 6) & 0x1f;
16661 op1
= MASK_SPECIAL(ctx
->opcode
);
16664 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16666 case OPC_MULT
... OPC_DIVU
:
16667 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16677 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16680 MIPS_INVAL("special_r6 muldiv");
16681 generate_exception(ctx
, EXCP_RI
);
16687 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16691 if (rt
== 0 && sa
== 1) {
16692 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16693 We need additionally to check other fields */
16694 gen_cl(ctx
, op1
, rd
, rs
);
16696 generate_exception(ctx
, EXCP_RI
);
16700 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16701 gen_helper_do_semihosting(cpu_env
);
16703 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16704 generate_exception(ctx
, EXCP_RI
);
16706 generate_exception(ctx
, EXCP_DBp
);
16710 #if defined(TARGET_MIPS64)
16712 check_mips_64(ctx
);
16713 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16717 if (rt
== 0 && sa
== 1) {
16718 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16719 We need additionally to check other fields */
16720 check_mips_64(ctx
);
16721 gen_cl(ctx
, op1
, rd
, rs
);
16723 generate_exception(ctx
, EXCP_RI
);
16726 case OPC_DMULT
... OPC_DDIVU
:
16727 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16737 check_mips_64(ctx
);
16738 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16741 MIPS_INVAL("special_r6 muldiv");
16742 generate_exception(ctx
, EXCP_RI
);
16747 default: /* Invalid */
16748 MIPS_INVAL("special_r6");
16749 generate_exception(ctx
, EXCP_RI
);
16754 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16756 int rs
, rt
, rd
, sa
;
16759 rs
= (ctx
->opcode
>> 21) & 0x1f;
16760 rt
= (ctx
->opcode
>> 16) & 0x1f;
16761 rd
= (ctx
->opcode
>> 11) & 0x1f;
16762 sa
= (ctx
->opcode
>> 6) & 0x1f;
16764 op1
= MASK_SPECIAL(ctx
->opcode
);
16766 case OPC_MOVN
: /* Conditional move */
16768 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16769 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16770 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16772 case OPC_MFHI
: /* Move from HI/LO */
16774 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16777 case OPC_MTLO
: /* Move to HI/LO */
16778 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16781 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16782 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16783 check_cp1_enabled(ctx
);
16784 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16785 (ctx
->opcode
>> 16) & 1);
16787 generate_exception_err(ctx
, EXCP_CpU
, 1);
16793 check_insn(ctx
, INSN_VR54XX
);
16794 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16795 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16797 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16802 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16804 #if defined(TARGET_MIPS64)
16805 case OPC_DMULT
... OPC_DDIVU
:
16806 check_insn(ctx
, ISA_MIPS3
);
16807 check_mips_64(ctx
);
16808 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16812 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16815 #ifdef MIPS_STRICT_STANDARD
16816 MIPS_INVAL("SPIM");
16817 generate_exception(ctx
, EXCP_RI
);
16819 /* Implemented as RI exception for now. */
16820 MIPS_INVAL("spim (unofficial)");
16821 generate_exception(ctx
, EXCP_RI
);
16824 default: /* Invalid */
16825 MIPS_INVAL("special_legacy");
16826 generate_exception(ctx
, EXCP_RI
);
16831 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16833 int rs
, rt
, rd
, sa
;
16836 rs
= (ctx
->opcode
>> 21) & 0x1f;
16837 rt
= (ctx
->opcode
>> 16) & 0x1f;
16838 rd
= (ctx
->opcode
>> 11) & 0x1f;
16839 sa
= (ctx
->opcode
>> 6) & 0x1f;
16841 op1
= MASK_SPECIAL(ctx
->opcode
);
16843 case OPC_SLL
: /* Shift with immediate */
16844 if (sa
== 5 && rd
== 0 &&
16845 rs
== 0 && rt
== 0) { /* PAUSE */
16846 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16847 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16848 generate_exception(ctx
, EXCP_RI
);
16854 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16857 switch ((ctx
->opcode
>> 21) & 0x1f) {
16859 /* rotr is decoded as srl on non-R2 CPUs */
16860 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16865 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16868 generate_exception(ctx
, EXCP_RI
);
16872 case OPC_ADD
... OPC_SUBU
:
16873 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16875 case OPC_SLLV
: /* Shifts */
16877 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16880 switch ((ctx
->opcode
>> 6) & 0x1f) {
16882 /* rotrv is decoded as srlv on non-R2 CPUs */
16883 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16888 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16891 generate_exception(ctx
, EXCP_RI
);
16895 case OPC_SLT
: /* Set on less than */
16897 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16899 case OPC_AND
: /* Logic*/
16903 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16906 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16908 case OPC_TGE
... OPC_TEQ
: /* Traps */
16910 check_insn(ctx
, ISA_MIPS2
);
16911 gen_trap(ctx
, op1
, rs
, rt
, -1);
16913 case OPC_LSA
: /* OPC_PMON */
16914 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16915 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16916 decode_opc_special_r6(env
, ctx
);
16918 /* Pmon entry point, also R4010 selsl */
16919 #ifdef MIPS_STRICT_STANDARD
16920 MIPS_INVAL("PMON / selsl");
16921 generate_exception(ctx
, EXCP_RI
);
16923 gen_helper_0e0i(pmon
, sa
);
16928 generate_exception(ctx
, EXCP_SYSCALL
);
16929 ctx
->bstate
= BS_STOP
;
16932 generate_exception(ctx
, EXCP_BREAK
);
16935 check_insn(ctx
, ISA_MIPS2
);
16936 /* Treat as NOP. */
16939 #if defined(TARGET_MIPS64)
16940 /* MIPS64 specific opcodes */
16945 check_insn(ctx
, ISA_MIPS3
);
16946 check_mips_64(ctx
);
16947 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16950 switch ((ctx
->opcode
>> 21) & 0x1f) {
16952 /* drotr is decoded as dsrl on non-R2 CPUs */
16953 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16958 check_insn(ctx
, ISA_MIPS3
);
16959 check_mips_64(ctx
);
16960 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16963 generate_exception(ctx
, EXCP_RI
);
16968 switch ((ctx
->opcode
>> 21) & 0x1f) {
16970 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
16971 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16976 check_insn(ctx
, ISA_MIPS3
);
16977 check_mips_64(ctx
);
16978 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16981 generate_exception(ctx
, EXCP_RI
);
16985 case OPC_DADD
... OPC_DSUBU
:
16986 check_insn(ctx
, ISA_MIPS3
);
16987 check_mips_64(ctx
);
16988 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16992 check_insn(ctx
, ISA_MIPS3
);
16993 check_mips_64(ctx
);
16994 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16997 switch ((ctx
->opcode
>> 6) & 0x1f) {
16999 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17000 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17005 check_insn(ctx
, ISA_MIPS3
);
17006 check_mips_64(ctx
);
17007 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17010 generate_exception(ctx
, EXCP_RI
);
17015 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17016 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17017 decode_opc_special_r6(env
, ctx
);
17022 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17023 decode_opc_special_r6(env
, ctx
);
17025 decode_opc_special_legacy(env
, ctx
);
17030 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17035 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17037 rs
= (ctx
->opcode
>> 21) & 0x1f;
17038 rt
= (ctx
->opcode
>> 16) & 0x1f;
17039 rd
= (ctx
->opcode
>> 11) & 0x1f;
17041 op1
= MASK_SPECIAL2(ctx
->opcode
);
17043 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17044 case OPC_MSUB
... OPC_MSUBU
:
17045 check_insn(ctx
, ISA_MIPS32
);
17046 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17049 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17052 case OPC_DIVU_G_2F
:
17053 case OPC_MULT_G_2F
:
17054 case OPC_MULTU_G_2F
:
17056 case OPC_MODU_G_2F
:
17057 check_insn(ctx
, INSN_LOONGSON2F
);
17058 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17062 check_insn(ctx
, ISA_MIPS32
);
17063 gen_cl(ctx
, op1
, rd
, rs
);
17066 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17067 gen_helper_do_semihosting(cpu_env
);
17069 /* XXX: not clear which exception should be raised
17070 * when in debug mode...
17072 check_insn(ctx
, ISA_MIPS32
);
17073 generate_exception(ctx
, EXCP_DBp
);
17076 #if defined(TARGET_MIPS64)
17079 check_insn(ctx
, ISA_MIPS64
);
17080 check_mips_64(ctx
);
17081 gen_cl(ctx
, op1
, rd
, rs
);
17083 case OPC_DMULT_G_2F
:
17084 case OPC_DMULTU_G_2F
:
17085 case OPC_DDIV_G_2F
:
17086 case OPC_DDIVU_G_2F
:
17087 case OPC_DMOD_G_2F
:
17088 case OPC_DMODU_G_2F
:
17089 check_insn(ctx
, INSN_LOONGSON2F
);
17090 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17093 default: /* Invalid */
17094 MIPS_INVAL("special2_legacy");
17095 generate_exception(ctx
, EXCP_RI
);
17100 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17102 int rs
, rt
, rd
, sa
;
17106 rs
= (ctx
->opcode
>> 21) & 0x1f;
17107 rt
= (ctx
->opcode
>> 16) & 0x1f;
17108 rd
= (ctx
->opcode
>> 11) & 0x1f;
17109 sa
= (ctx
->opcode
>> 6) & 0x1f;
17110 imm
= (int16_t)ctx
->opcode
>> 7;
17112 op1
= MASK_SPECIAL3(ctx
->opcode
);
17116 /* hint codes 24-31 are reserved and signal RI */
17117 generate_exception(ctx
, EXCP_RI
);
17119 /* Treat as NOP. */
17122 /* Treat as NOP. */
17125 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17128 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17133 /* Treat as NOP. */
17136 op2
= MASK_BSHFL(ctx
->opcode
);
17138 case OPC_ALIGN
... OPC_ALIGN_END
:
17139 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17142 gen_bitswap(ctx
, op2
, rd
, rt
);
17147 #if defined(TARGET_MIPS64)
17149 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17152 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17155 check_mips_64(ctx
);
17158 /* Treat as NOP. */
17161 op2
= MASK_DBSHFL(ctx
->opcode
);
17163 case OPC_DALIGN
... OPC_DALIGN_END
:
17164 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17167 gen_bitswap(ctx
, op2
, rd
, rt
);
17174 default: /* Invalid */
17175 MIPS_INVAL("special3_r6");
17176 generate_exception(ctx
, EXCP_RI
);
17181 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17186 rs
= (ctx
->opcode
>> 21) & 0x1f;
17187 rt
= (ctx
->opcode
>> 16) & 0x1f;
17188 rd
= (ctx
->opcode
>> 11) & 0x1f;
17190 op1
= MASK_SPECIAL3(ctx
->opcode
);
17192 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17193 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17194 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17195 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17196 * the same mask and op1. */
17197 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17198 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17201 case OPC_ADDUH_R_QB
:
17203 case OPC_ADDQH_R_PH
:
17205 case OPC_ADDQH_R_W
:
17207 case OPC_SUBUH_R_QB
:
17209 case OPC_SUBQH_R_PH
:
17211 case OPC_SUBQH_R_W
:
17212 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17217 case OPC_MULQ_RS_W
:
17218 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17221 MIPS_INVAL("MASK ADDUH.QB");
17222 generate_exception(ctx
, EXCP_RI
);
17225 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17226 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17228 generate_exception(ctx
, EXCP_RI
);
17232 op2
= MASK_LX(ctx
->opcode
);
17234 #if defined(TARGET_MIPS64)
17240 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17242 default: /* Invalid */
17243 MIPS_INVAL("MASK LX");
17244 generate_exception(ctx
, EXCP_RI
);
17248 case OPC_ABSQ_S_PH_DSP
:
17249 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17251 case OPC_ABSQ_S_QB
:
17252 case OPC_ABSQ_S_PH
:
17254 case OPC_PRECEQ_W_PHL
:
17255 case OPC_PRECEQ_W_PHR
:
17256 case OPC_PRECEQU_PH_QBL
:
17257 case OPC_PRECEQU_PH_QBR
:
17258 case OPC_PRECEQU_PH_QBLA
:
17259 case OPC_PRECEQU_PH_QBRA
:
17260 case OPC_PRECEU_PH_QBL
:
17261 case OPC_PRECEU_PH_QBR
:
17262 case OPC_PRECEU_PH_QBLA
:
17263 case OPC_PRECEU_PH_QBRA
:
17264 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17271 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17274 MIPS_INVAL("MASK ABSQ_S.PH");
17275 generate_exception(ctx
, EXCP_RI
);
17279 case OPC_ADDU_QB_DSP
:
17280 op2
= MASK_ADDU_QB(ctx
->opcode
);
17283 case OPC_ADDQ_S_PH
:
17286 case OPC_ADDU_S_QB
:
17288 case OPC_ADDU_S_PH
:
17290 case OPC_SUBQ_S_PH
:
17293 case OPC_SUBU_S_QB
:
17295 case OPC_SUBU_S_PH
:
17299 case OPC_RADDU_W_QB
:
17300 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17302 case OPC_MULEU_S_PH_QBL
:
17303 case OPC_MULEU_S_PH_QBR
:
17304 case OPC_MULQ_RS_PH
:
17305 case OPC_MULEQ_S_W_PHL
:
17306 case OPC_MULEQ_S_W_PHR
:
17307 case OPC_MULQ_S_PH
:
17308 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17310 default: /* Invalid */
17311 MIPS_INVAL("MASK ADDU.QB");
17312 generate_exception(ctx
, EXCP_RI
);
17317 case OPC_CMPU_EQ_QB_DSP
:
17318 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17320 case OPC_PRECR_SRA_PH_W
:
17321 case OPC_PRECR_SRA_R_PH_W
:
17322 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17324 case OPC_PRECR_QB_PH
:
17325 case OPC_PRECRQ_QB_PH
:
17326 case OPC_PRECRQ_PH_W
:
17327 case OPC_PRECRQ_RS_PH_W
:
17328 case OPC_PRECRQU_S_QB_PH
:
17329 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17331 case OPC_CMPU_EQ_QB
:
17332 case OPC_CMPU_LT_QB
:
17333 case OPC_CMPU_LE_QB
:
17334 case OPC_CMP_EQ_PH
:
17335 case OPC_CMP_LT_PH
:
17336 case OPC_CMP_LE_PH
:
17337 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17339 case OPC_CMPGU_EQ_QB
:
17340 case OPC_CMPGU_LT_QB
:
17341 case OPC_CMPGU_LE_QB
:
17342 case OPC_CMPGDU_EQ_QB
:
17343 case OPC_CMPGDU_LT_QB
:
17344 case OPC_CMPGDU_LE_QB
:
17347 case OPC_PACKRL_PH
:
17348 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17350 default: /* Invalid */
17351 MIPS_INVAL("MASK CMPU.EQ.QB");
17352 generate_exception(ctx
, EXCP_RI
);
17356 case OPC_SHLL_QB_DSP
:
17357 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17359 case OPC_DPA_W_PH_DSP
:
17360 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17362 case OPC_DPAU_H_QBL
:
17363 case OPC_DPAU_H_QBR
:
17364 case OPC_DPSU_H_QBL
:
17365 case OPC_DPSU_H_QBR
:
17367 case OPC_DPAX_W_PH
:
17368 case OPC_DPAQ_S_W_PH
:
17369 case OPC_DPAQX_S_W_PH
:
17370 case OPC_DPAQX_SA_W_PH
:
17372 case OPC_DPSX_W_PH
:
17373 case OPC_DPSQ_S_W_PH
:
17374 case OPC_DPSQX_S_W_PH
:
17375 case OPC_DPSQX_SA_W_PH
:
17376 case OPC_MULSAQ_S_W_PH
:
17377 case OPC_DPAQ_SA_L_W
:
17378 case OPC_DPSQ_SA_L_W
:
17379 case OPC_MAQ_S_W_PHL
:
17380 case OPC_MAQ_S_W_PHR
:
17381 case OPC_MAQ_SA_W_PHL
:
17382 case OPC_MAQ_SA_W_PHR
:
17383 case OPC_MULSA_W_PH
:
17384 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17386 default: /* Invalid */
17387 MIPS_INVAL("MASK DPAW.PH");
17388 generate_exception(ctx
, EXCP_RI
);
17393 op2
= MASK_INSV(ctx
->opcode
);
17404 t0
= tcg_temp_new();
17405 t1
= tcg_temp_new();
17407 gen_load_gpr(t0
, rt
);
17408 gen_load_gpr(t1
, rs
);
17410 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17416 default: /* Invalid */
17417 MIPS_INVAL("MASK INSV");
17418 generate_exception(ctx
, EXCP_RI
);
17422 case OPC_APPEND_DSP
:
17423 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17425 case OPC_EXTR_W_DSP
:
17426 op2
= MASK_EXTR_W(ctx
->opcode
);
17430 case OPC_EXTR_RS_W
:
17432 case OPC_EXTRV_S_H
:
17434 case OPC_EXTRV_R_W
:
17435 case OPC_EXTRV_RS_W
:
17440 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17443 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17449 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17451 default: /* Invalid */
17452 MIPS_INVAL("MASK EXTR.W");
17453 generate_exception(ctx
, EXCP_RI
);
17457 #if defined(TARGET_MIPS64)
17458 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17459 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17460 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17461 check_insn(ctx
, INSN_LOONGSON2E
);
17462 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17464 case OPC_ABSQ_S_QH_DSP
:
17465 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17467 case OPC_PRECEQ_L_PWL
:
17468 case OPC_PRECEQ_L_PWR
:
17469 case OPC_PRECEQ_PW_QHL
:
17470 case OPC_PRECEQ_PW_QHR
:
17471 case OPC_PRECEQ_PW_QHLA
:
17472 case OPC_PRECEQ_PW_QHRA
:
17473 case OPC_PRECEQU_QH_OBL
:
17474 case OPC_PRECEQU_QH_OBR
:
17475 case OPC_PRECEQU_QH_OBLA
:
17476 case OPC_PRECEQU_QH_OBRA
:
17477 case OPC_PRECEU_QH_OBL
:
17478 case OPC_PRECEU_QH_OBR
:
17479 case OPC_PRECEU_QH_OBLA
:
17480 case OPC_PRECEU_QH_OBRA
:
17481 case OPC_ABSQ_S_OB
:
17482 case OPC_ABSQ_S_PW
:
17483 case OPC_ABSQ_S_QH
:
17484 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17492 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17494 default: /* Invalid */
17495 MIPS_INVAL("MASK ABSQ_S.QH");
17496 generate_exception(ctx
, EXCP_RI
);
17500 case OPC_ADDU_OB_DSP
:
17501 op2
= MASK_ADDU_OB(ctx
->opcode
);
17503 case OPC_RADDU_L_OB
:
17505 case OPC_SUBQ_S_PW
:
17507 case OPC_SUBQ_S_QH
:
17509 case OPC_SUBU_S_OB
:
17511 case OPC_SUBU_S_QH
:
17513 case OPC_SUBUH_R_OB
:
17515 case OPC_ADDQ_S_PW
:
17517 case OPC_ADDQ_S_QH
:
17519 case OPC_ADDU_S_OB
:
17521 case OPC_ADDU_S_QH
:
17523 case OPC_ADDUH_R_OB
:
17524 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17526 case OPC_MULEQ_S_PW_QHL
:
17527 case OPC_MULEQ_S_PW_QHR
:
17528 case OPC_MULEU_S_QH_OBL
:
17529 case OPC_MULEU_S_QH_OBR
:
17530 case OPC_MULQ_RS_QH
:
17531 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17533 default: /* Invalid */
17534 MIPS_INVAL("MASK ADDU.OB");
17535 generate_exception(ctx
, EXCP_RI
);
17539 case OPC_CMPU_EQ_OB_DSP
:
17540 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17542 case OPC_PRECR_SRA_QH_PW
:
17543 case OPC_PRECR_SRA_R_QH_PW
:
17544 /* Return value is rt. */
17545 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17547 case OPC_PRECR_OB_QH
:
17548 case OPC_PRECRQ_OB_QH
:
17549 case OPC_PRECRQ_PW_L
:
17550 case OPC_PRECRQ_QH_PW
:
17551 case OPC_PRECRQ_RS_QH_PW
:
17552 case OPC_PRECRQU_S_OB_QH
:
17553 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17555 case OPC_CMPU_EQ_OB
:
17556 case OPC_CMPU_LT_OB
:
17557 case OPC_CMPU_LE_OB
:
17558 case OPC_CMP_EQ_QH
:
17559 case OPC_CMP_LT_QH
:
17560 case OPC_CMP_LE_QH
:
17561 case OPC_CMP_EQ_PW
:
17562 case OPC_CMP_LT_PW
:
17563 case OPC_CMP_LE_PW
:
17564 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17566 case OPC_CMPGDU_EQ_OB
:
17567 case OPC_CMPGDU_LT_OB
:
17568 case OPC_CMPGDU_LE_OB
:
17569 case OPC_CMPGU_EQ_OB
:
17570 case OPC_CMPGU_LT_OB
:
17571 case OPC_CMPGU_LE_OB
:
17572 case OPC_PACKRL_PW
:
17576 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17578 default: /* Invalid */
17579 MIPS_INVAL("MASK CMPU_EQ.OB");
17580 generate_exception(ctx
, EXCP_RI
);
17584 case OPC_DAPPEND_DSP
:
17585 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17587 case OPC_DEXTR_W_DSP
:
17588 op2
= MASK_DEXTR_W(ctx
->opcode
);
17595 case OPC_DEXTR_R_L
:
17596 case OPC_DEXTR_RS_L
:
17598 case OPC_DEXTR_R_W
:
17599 case OPC_DEXTR_RS_W
:
17600 case OPC_DEXTR_S_H
:
17602 case OPC_DEXTRV_R_L
:
17603 case OPC_DEXTRV_RS_L
:
17604 case OPC_DEXTRV_S_H
:
17606 case OPC_DEXTRV_R_W
:
17607 case OPC_DEXTRV_RS_W
:
17608 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17613 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17615 default: /* Invalid */
17616 MIPS_INVAL("MASK EXTR.W");
17617 generate_exception(ctx
, EXCP_RI
);
17621 case OPC_DPAQ_W_QH_DSP
:
17622 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17624 case OPC_DPAU_H_OBL
:
17625 case OPC_DPAU_H_OBR
:
17626 case OPC_DPSU_H_OBL
:
17627 case OPC_DPSU_H_OBR
:
17629 case OPC_DPAQ_S_W_QH
:
17631 case OPC_DPSQ_S_W_QH
:
17632 case OPC_MULSAQ_S_W_QH
:
17633 case OPC_DPAQ_SA_L_PW
:
17634 case OPC_DPSQ_SA_L_PW
:
17635 case OPC_MULSAQ_S_L_PW
:
17636 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17638 case OPC_MAQ_S_W_QHLL
:
17639 case OPC_MAQ_S_W_QHLR
:
17640 case OPC_MAQ_S_W_QHRL
:
17641 case OPC_MAQ_S_W_QHRR
:
17642 case OPC_MAQ_SA_W_QHLL
:
17643 case OPC_MAQ_SA_W_QHLR
:
17644 case OPC_MAQ_SA_W_QHRL
:
17645 case OPC_MAQ_SA_W_QHRR
:
17646 case OPC_MAQ_S_L_PWL
:
17647 case OPC_MAQ_S_L_PWR
:
17652 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17654 default: /* Invalid */
17655 MIPS_INVAL("MASK DPAQ.W.QH");
17656 generate_exception(ctx
, EXCP_RI
);
17660 case OPC_DINSV_DSP
:
17661 op2
= MASK_INSV(ctx
->opcode
);
17672 t0
= tcg_temp_new();
17673 t1
= tcg_temp_new();
17675 gen_load_gpr(t0
, rt
);
17676 gen_load_gpr(t1
, rs
);
17678 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17684 default: /* Invalid */
17685 MIPS_INVAL("MASK DINSV");
17686 generate_exception(ctx
, EXCP_RI
);
17690 case OPC_SHLL_OB_DSP
:
17691 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17694 default: /* Invalid */
17695 MIPS_INVAL("special3_legacy");
17696 generate_exception(ctx
, EXCP_RI
);
17701 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17703 int rs
, rt
, rd
, sa
;
17706 rs
= (ctx
->opcode
>> 21) & 0x1f;
17707 rt
= (ctx
->opcode
>> 16) & 0x1f;
17708 rd
= (ctx
->opcode
>> 11) & 0x1f;
17709 sa
= (ctx
->opcode
>> 6) & 0x1f;
17711 op1
= MASK_SPECIAL3(ctx
->opcode
);
17715 check_insn(ctx
, ISA_MIPS32R2
);
17716 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17719 op2
= MASK_BSHFL(ctx
->opcode
);
17721 case OPC_ALIGN
... OPC_ALIGN_END
:
17723 check_insn(ctx
, ISA_MIPS32R6
);
17724 decode_opc_special3_r6(env
, ctx
);
17727 check_insn(ctx
, ISA_MIPS32R2
);
17728 gen_bshfl(ctx
, op2
, rt
, rd
);
17732 #if defined(TARGET_MIPS64)
17733 case OPC_DEXTM
... OPC_DEXT
:
17734 case OPC_DINSM
... OPC_DINS
:
17735 check_insn(ctx
, ISA_MIPS64R2
);
17736 check_mips_64(ctx
);
17737 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17740 op2
= MASK_DBSHFL(ctx
->opcode
);
17742 case OPC_DALIGN
... OPC_DALIGN_END
:
17744 check_insn(ctx
, ISA_MIPS32R6
);
17745 decode_opc_special3_r6(env
, ctx
);
17748 check_insn(ctx
, ISA_MIPS64R2
);
17749 check_mips_64(ctx
);
17750 op2
= MASK_DBSHFL(ctx
->opcode
);
17751 gen_bshfl(ctx
, op2
, rt
, rd
);
17757 gen_rdhwr(ctx
, rt
, rd
);
17760 check_insn(ctx
, ASE_MT
);
17762 TCGv t0
= tcg_temp_new();
17763 TCGv t1
= tcg_temp_new();
17765 gen_load_gpr(t0
, rt
);
17766 gen_load_gpr(t1
, rs
);
17767 gen_helper_fork(t0
, t1
);
17773 check_insn(ctx
, ASE_MT
);
17775 TCGv t0
= tcg_temp_new();
17777 save_cpu_state(ctx
, 1);
17778 gen_load_gpr(t0
, rs
);
17779 gen_helper_yield(t0
, cpu_env
, t0
);
17780 gen_store_gpr(t0
, rd
);
17785 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17786 decode_opc_special3_r6(env
, ctx
);
17788 decode_opc_special3_legacy(env
, ctx
);
17793 /* MIPS SIMD Architecture (MSA) */
17794 static inline int check_msa_access(DisasContext
*ctx
)
17796 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
17797 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
17798 generate_exception(ctx
, EXCP_RI
);
17802 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
17803 if (ctx
->insn_flags
& ASE_MSA
) {
17804 generate_exception(ctx
, EXCP_MSADIS
);
17807 generate_exception(ctx
, EXCP_RI
);
17814 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
17816 /* generates tcg ops to check if any element is 0 */
17817 /* Note this function only works with MSA_WRLEN = 128 */
17818 uint64_t eval_zero_or_big
= 0;
17819 uint64_t eval_big
= 0;
17820 TCGv_i64 t0
= tcg_temp_new_i64();
17821 TCGv_i64 t1
= tcg_temp_new_i64();
17824 eval_zero_or_big
= 0x0101010101010101ULL
;
17825 eval_big
= 0x8080808080808080ULL
;
17828 eval_zero_or_big
= 0x0001000100010001ULL
;
17829 eval_big
= 0x8000800080008000ULL
;
17832 eval_zero_or_big
= 0x0000000100000001ULL
;
17833 eval_big
= 0x8000000080000000ULL
;
17836 eval_zero_or_big
= 0x0000000000000001ULL
;
17837 eval_big
= 0x8000000000000000ULL
;
17840 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
17841 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
17842 tcg_gen_andi_i64(t0
, t0
, eval_big
);
17843 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
17844 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
17845 tcg_gen_andi_i64(t1
, t1
, eval_big
);
17846 tcg_gen_or_i64(t0
, t0
, t1
);
17847 /* if all bits are zero then all elements are not zero */
17848 /* if some bit is non-zero then some element is zero */
17849 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
17850 tcg_gen_trunc_i64_tl(tresult
, t0
);
17851 tcg_temp_free_i64(t0
);
17852 tcg_temp_free_i64(t1
);
17855 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
17857 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17858 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17859 int64_t s16
= (int16_t)ctx
->opcode
;
17861 check_msa_access(ctx
);
17863 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17864 generate_exception(ctx
, EXCP_RI
);
17871 TCGv_i64 t0
= tcg_temp_new_i64();
17872 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17873 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17874 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17875 tcg_gen_trunc_i64_tl(bcond
, t0
);
17876 tcg_temp_free_i64(t0
);
17883 gen_check_zero_element(bcond
, df
, wt
);
17889 gen_check_zero_element(bcond
, df
, wt
);
17890 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
17894 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
17896 ctx
->hflags
|= MIPS_HFLAG_BC
;
17897 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
17900 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
17902 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
17903 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
17904 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17905 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17907 TCGv_i32 twd
= tcg_const_i32(wd
);
17908 TCGv_i32 tws
= tcg_const_i32(ws
);
17909 TCGv_i32 ti8
= tcg_const_i32(i8
);
17911 switch (MASK_MSA_I8(ctx
->opcode
)) {
17913 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
17916 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
17919 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
17922 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
17925 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
17928 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
17931 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
17937 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
17938 if (df
== DF_DOUBLE
) {
17939 generate_exception(ctx
, EXCP_RI
);
17941 TCGv_i32 tdf
= tcg_const_i32(df
);
17942 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
17943 tcg_temp_free_i32(tdf
);
17948 MIPS_INVAL("MSA instruction");
17949 generate_exception(ctx
, EXCP_RI
);
17953 tcg_temp_free_i32(twd
);
17954 tcg_temp_free_i32(tws
);
17955 tcg_temp_free_i32(ti8
);
17958 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17960 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17961 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17962 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17963 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17964 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17965 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17967 TCGv_i32 tdf
= tcg_const_i32(df
);
17968 TCGv_i32 twd
= tcg_const_i32(wd
);
17969 TCGv_i32 tws
= tcg_const_i32(ws
);
17970 TCGv_i32 timm
= tcg_temp_new_i32();
17971 tcg_gen_movi_i32(timm
, u5
);
17973 switch (MASK_MSA_I5(ctx
->opcode
)) {
17975 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17978 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17980 case OPC_MAXI_S_df
:
17981 tcg_gen_movi_i32(timm
, s5
);
17982 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17984 case OPC_MAXI_U_df
:
17985 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17987 case OPC_MINI_S_df
:
17988 tcg_gen_movi_i32(timm
, s5
);
17989 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17991 case OPC_MINI_U_df
:
17992 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17995 tcg_gen_movi_i32(timm
, s5
);
17996 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17998 case OPC_CLTI_S_df
:
17999 tcg_gen_movi_i32(timm
, s5
);
18000 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18002 case OPC_CLTI_U_df
:
18003 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18005 case OPC_CLEI_S_df
:
18006 tcg_gen_movi_i32(timm
, s5
);
18007 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18009 case OPC_CLEI_U_df
:
18010 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18014 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18015 tcg_gen_movi_i32(timm
, s10
);
18016 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18020 MIPS_INVAL("MSA instruction");
18021 generate_exception(ctx
, EXCP_RI
);
18025 tcg_temp_free_i32(tdf
);
18026 tcg_temp_free_i32(twd
);
18027 tcg_temp_free_i32(tws
);
18028 tcg_temp_free_i32(timm
);
18031 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18033 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18034 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18035 uint32_t df
= 0, m
= 0;
18036 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18037 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18044 if ((dfm
& 0x40) == 0x00) {
18047 } else if ((dfm
& 0x60) == 0x40) {
18050 } else if ((dfm
& 0x70) == 0x60) {
18053 } else if ((dfm
& 0x78) == 0x70) {
18057 generate_exception(ctx
, EXCP_RI
);
18061 tdf
= tcg_const_i32(df
);
18062 tm
= tcg_const_i32(m
);
18063 twd
= tcg_const_i32(wd
);
18064 tws
= tcg_const_i32(ws
);
18066 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18068 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18071 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18074 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18077 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18080 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18083 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18085 case OPC_BINSLI_df
:
18086 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18088 case OPC_BINSRI_df
:
18089 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18092 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18095 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18098 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18101 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18104 MIPS_INVAL("MSA instruction");
18105 generate_exception(ctx
, EXCP_RI
);
18109 tcg_temp_free_i32(tdf
);
18110 tcg_temp_free_i32(tm
);
18111 tcg_temp_free_i32(twd
);
18112 tcg_temp_free_i32(tws
);
18115 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18117 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18118 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18119 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18120 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18121 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18123 TCGv_i32 tdf
= tcg_const_i32(df
);
18124 TCGv_i32 twd
= tcg_const_i32(wd
);
18125 TCGv_i32 tws
= tcg_const_i32(ws
);
18126 TCGv_i32 twt
= tcg_const_i32(wt
);
18128 switch (MASK_MSA_3R(ctx
->opcode
)) {
18130 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18133 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18136 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18139 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18141 case OPC_SUBS_S_df
:
18142 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18145 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18148 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18151 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18154 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18157 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18159 case OPC_ADDS_A_df
:
18160 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18162 case OPC_SUBS_U_df
:
18163 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18166 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18169 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18172 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18175 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18178 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18181 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18183 case OPC_ADDS_S_df
:
18184 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18186 case OPC_SUBSUS_U_df
:
18187 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18190 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18193 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18196 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18199 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18202 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18205 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18207 case OPC_ADDS_U_df
:
18208 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18210 case OPC_SUBSUU_S_df
:
18211 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18214 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18217 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18220 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18223 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18226 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18228 case OPC_ASUB_S_df
:
18229 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18232 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18235 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18238 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18241 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18244 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18247 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18249 case OPC_ASUB_U_df
:
18250 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18253 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18256 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18259 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18262 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18264 case OPC_AVER_S_df
:
18265 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18268 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18271 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18274 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18277 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18279 case OPC_AVER_U_df
:
18280 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18283 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18286 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18289 case OPC_DOTP_S_df
:
18290 case OPC_DOTP_U_df
:
18291 case OPC_DPADD_S_df
:
18292 case OPC_DPADD_U_df
:
18293 case OPC_DPSUB_S_df
:
18294 case OPC_HADD_S_df
:
18295 case OPC_DPSUB_U_df
:
18296 case OPC_HADD_U_df
:
18297 case OPC_HSUB_S_df
:
18298 case OPC_HSUB_U_df
:
18299 if (df
== DF_BYTE
) {
18300 generate_exception(ctx
, EXCP_RI
);
18302 switch (MASK_MSA_3R(ctx
->opcode
)) {
18303 case OPC_DOTP_S_df
:
18304 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18306 case OPC_DOTP_U_df
:
18307 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18309 case OPC_DPADD_S_df
:
18310 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18312 case OPC_DPADD_U_df
:
18313 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18315 case OPC_DPSUB_S_df
:
18316 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18318 case OPC_HADD_S_df
:
18319 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18321 case OPC_DPSUB_U_df
:
18322 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18324 case OPC_HADD_U_df
:
18325 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18327 case OPC_HSUB_S_df
:
18328 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18330 case OPC_HSUB_U_df
:
18331 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18336 MIPS_INVAL("MSA instruction");
18337 generate_exception(ctx
, EXCP_RI
);
18340 tcg_temp_free_i32(twd
);
18341 tcg_temp_free_i32(tws
);
18342 tcg_temp_free_i32(twt
);
18343 tcg_temp_free_i32(tdf
);
18346 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18348 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18349 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18350 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18351 TCGv telm
= tcg_temp_new();
18352 TCGv_i32 tsr
= tcg_const_i32(source
);
18353 TCGv_i32 tdt
= tcg_const_i32(dest
);
18355 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18357 gen_load_gpr(telm
, source
);
18358 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18361 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18362 gen_store_gpr(telm
, dest
);
18365 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18368 MIPS_INVAL("MSA instruction");
18369 generate_exception(ctx
, EXCP_RI
);
18373 tcg_temp_free(telm
);
18374 tcg_temp_free_i32(tdt
);
18375 tcg_temp_free_i32(tsr
);
18378 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18381 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18382 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18383 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18385 TCGv_i32 tws
= tcg_const_i32(ws
);
18386 TCGv_i32 twd
= tcg_const_i32(wd
);
18387 TCGv_i32 tn
= tcg_const_i32(n
);
18388 TCGv_i32 tdf
= tcg_const_i32(df
);
18390 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18392 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18394 case OPC_SPLATI_df
:
18395 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18398 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18400 case OPC_COPY_S_df
:
18401 case OPC_COPY_U_df
:
18402 case OPC_INSERT_df
:
18403 #if !defined(TARGET_MIPS64)
18404 /* Double format valid only for MIPS64 */
18405 if (df
== DF_DOUBLE
) {
18406 generate_exception(ctx
, EXCP_RI
);
18410 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18411 case OPC_COPY_S_df
:
18412 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18414 case OPC_COPY_U_df
:
18415 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18417 case OPC_INSERT_df
:
18418 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18423 MIPS_INVAL("MSA instruction");
18424 generate_exception(ctx
, EXCP_RI
);
18426 tcg_temp_free_i32(twd
);
18427 tcg_temp_free_i32(tws
);
18428 tcg_temp_free_i32(tn
);
18429 tcg_temp_free_i32(tdf
);
18432 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18434 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18435 uint32_t df
= 0, n
= 0;
18437 if ((dfn
& 0x30) == 0x00) {
18440 } else if ((dfn
& 0x38) == 0x20) {
18443 } else if ((dfn
& 0x3c) == 0x30) {
18446 } else if ((dfn
& 0x3e) == 0x38) {
18449 } else if (dfn
== 0x3E) {
18450 /* CTCMSA, CFCMSA, MOVE.V */
18451 gen_msa_elm_3e(env
, ctx
);
18454 generate_exception(ctx
, EXCP_RI
);
18458 gen_msa_elm_df(env
, ctx
, df
, n
);
18461 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18463 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18464 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18465 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18466 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18467 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18469 TCGv_i32 twd
= tcg_const_i32(wd
);
18470 TCGv_i32 tws
= tcg_const_i32(ws
);
18471 TCGv_i32 twt
= tcg_const_i32(wt
);
18472 TCGv_i32 tdf
= tcg_temp_new_i32();
18474 /* adjust df value for floating-point instruction */
18475 tcg_gen_movi_i32(tdf
, df
+ 2);
18477 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18479 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18482 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18485 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18488 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18491 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18494 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18497 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18500 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18503 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18506 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18509 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18512 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18515 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18518 tcg_gen_movi_i32(tdf
, df
+ 1);
18519 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18522 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18525 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18527 case OPC_MADD_Q_df
:
18528 tcg_gen_movi_i32(tdf
, df
+ 1);
18529 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18534 case OPC_MSUB_Q_df
:
18535 tcg_gen_movi_i32(tdf
, df
+ 1);
18536 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18539 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18542 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18545 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18548 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18551 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18554 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18557 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18560 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18563 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18566 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18569 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18572 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18575 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18577 case OPC_MULR_Q_df
:
18578 tcg_gen_movi_i32(tdf
, df
+ 1);
18579 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18582 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18584 case OPC_FMIN_A_df
:
18585 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18587 case OPC_MADDR_Q_df
:
18588 tcg_gen_movi_i32(tdf
, df
+ 1);
18589 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18592 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18595 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18597 case OPC_MSUBR_Q_df
:
18598 tcg_gen_movi_i32(tdf
, df
+ 1);
18599 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18602 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18604 case OPC_FMAX_A_df
:
18605 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18608 MIPS_INVAL("MSA instruction");
18609 generate_exception(ctx
, EXCP_RI
);
18613 tcg_temp_free_i32(twd
);
18614 tcg_temp_free_i32(tws
);
18615 tcg_temp_free_i32(twt
);
18616 tcg_temp_free_i32(tdf
);
18619 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18621 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18622 (op & (0x7 << 18)))
18623 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18624 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18625 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18626 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18627 TCGv_i32 twd
= tcg_const_i32(wd
);
18628 TCGv_i32 tws
= tcg_const_i32(ws
);
18629 TCGv_i32 twt
= tcg_const_i32(wt
);
18630 TCGv_i32 tdf
= tcg_const_i32(df
);
18632 switch (MASK_MSA_2R(ctx
->opcode
)) {
18634 #if !defined(TARGET_MIPS64)
18635 /* Double format valid only for MIPS64 */
18636 if (df
== DF_DOUBLE
) {
18637 generate_exception(ctx
, EXCP_RI
);
18641 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18644 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18647 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18650 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18653 MIPS_INVAL("MSA instruction");
18654 generate_exception(ctx
, EXCP_RI
);
18658 tcg_temp_free_i32(twd
);
18659 tcg_temp_free_i32(tws
);
18660 tcg_temp_free_i32(twt
);
18661 tcg_temp_free_i32(tdf
);
18664 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18666 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18667 (op & (0xf << 17)))
18668 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18669 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18670 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18671 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18672 TCGv_i32 twd
= tcg_const_i32(wd
);
18673 TCGv_i32 tws
= tcg_const_i32(ws
);
18674 TCGv_i32 twt
= tcg_const_i32(wt
);
18675 /* adjust df value for floating-point instruction */
18676 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18678 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18679 case OPC_FCLASS_df
:
18680 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18682 case OPC_FTRUNC_S_df
:
18683 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18685 case OPC_FTRUNC_U_df
:
18686 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18689 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18691 case OPC_FRSQRT_df
:
18692 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18695 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18698 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18701 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18703 case OPC_FEXUPL_df
:
18704 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18706 case OPC_FEXUPR_df
:
18707 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18710 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18713 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18715 case OPC_FTINT_S_df
:
18716 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18718 case OPC_FTINT_U_df
:
18719 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18721 case OPC_FFINT_S_df
:
18722 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18724 case OPC_FFINT_U_df
:
18725 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18729 tcg_temp_free_i32(twd
);
18730 tcg_temp_free_i32(tws
);
18731 tcg_temp_free_i32(twt
);
18732 tcg_temp_free_i32(tdf
);
18735 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18737 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18738 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18739 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18740 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18741 TCGv_i32 twd
= tcg_const_i32(wd
);
18742 TCGv_i32 tws
= tcg_const_i32(ws
);
18743 TCGv_i32 twt
= tcg_const_i32(wt
);
18745 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18747 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18750 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18753 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18756 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18759 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18762 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18765 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18768 MIPS_INVAL("MSA instruction");
18769 generate_exception(ctx
, EXCP_RI
);
18773 tcg_temp_free_i32(twd
);
18774 tcg_temp_free_i32(tws
);
18775 tcg_temp_free_i32(twt
);
18778 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18780 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18788 gen_msa_vec_v(env
, ctx
);
18791 gen_msa_2r(env
, ctx
);
18794 gen_msa_2rf(env
, ctx
);
18797 MIPS_INVAL("MSA instruction");
18798 generate_exception(ctx
, EXCP_RI
);
18803 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18805 uint32_t opcode
= ctx
->opcode
;
18806 check_insn(ctx
, ASE_MSA
);
18807 check_msa_access(ctx
);
18809 switch (MASK_MSA_MINOR(opcode
)) {
18810 case OPC_MSA_I8_00
:
18811 case OPC_MSA_I8_01
:
18812 case OPC_MSA_I8_02
:
18813 gen_msa_i8(env
, ctx
);
18815 case OPC_MSA_I5_06
:
18816 case OPC_MSA_I5_07
:
18817 gen_msa_i5(env
, ctx
);
18819 case OPC_MSA_BIT_09
:
18820 case OPC_MSA_BIT_0A
:
18821 gen_msa_bit(env
, ctx
);
18823 case OPC_MSA_3R_0D
:
18824 case OPC_MSA_3R_0E
:
18825 case OPC_MSA_3R_0F
:
18826 case OPC_MSA_3R_10
:
18827 case OPC_MSA_3R_11
:
18828 case OPC_MSA_3R_12
:
18829 case OPC_MSA_3R_13
:
18830 case OPC_MSA_3R_14
:
18831 case OPC_MSA_3R_15
:
18832 gen_msa_3r(env
, ctx
);
18835 gen_msa_elm(env
, ctx
);
18837 case OPC_MSA_3RF_1A
:
18838 case OPC_MSA_3RF_1B
:
18839 case OPC_MSA_3RF_1C
:
18840 gen_msa_3rf(env
, ctx
);
18843 gen_msa_vec(env
, ctx
);
18854 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18855 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18856 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18857 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18859 TCGv_i32 twd
= tcg_const_i32(wd
);
18860 TCGv taddr
= tcg_temp_new();
18861 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18863 switch (MASK_MSA_MINOR(opcode
)) {
18865 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18868 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18871 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18874 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18877 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18880 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18883 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18886 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18890 tcg_temp_free_i32(twd
);
18891 tcg_temp_free(taddr
);
18895 MIPS_INVAL("MSA instruction");
18896 generate_exception(ctx
, EXCP_RI
);
18902 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18905 int rs
, rt
, rd
, sa
;
18909 /* make sure instructions are on a word boundary */
18910 if (ctx
->pc
& 0x3) {
18911 env
->CP0_BadVAddr
= ctx
->pc
;
18912 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18913 ctx
->bstate
= BS_STOP
;
18917 /* Handle blikely not taken case */
18918 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18919 TCGLabel
*l1
= gen_new_label();
18921 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18922 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18923 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
18927 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
18928 tcg_gen_debug_insn_start(ctx
->pc
);
18931 op
= MASK_OP_MAJOR(ctx
->opcode
);
18932 rs
= (ctx
->opcode
>> 21) & 0x1f;
18933 rt
= (ctx
->opcode
>> 16) & 0x1f;
18934 rd
= (ctx
->opcode
>> 11) & 0x1f;
18935 sa
= (ctx
->opcode
>> 6) & 0x1f;
18936 imm
= (int16_t)ctx
->opcode
;
18939 decode_opc_special(env
, ctx
);
18942 decode_opc_special2_legacy(env
, ctx
);
18945 decode_opc_special3(env
, ctx
);
18948 op1
= MASK_REGIMM(ctx
->opcode
);
18950 case OPC_BLTZL
: /* REGIMM branches */
18954 check_insn(ctx
, ISA_MIPS2
);
18955 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18959 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18963 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18965 /* OPC_NAL, OPC_BAL */
18966 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18968 generate_exception(ctx
, EXCP_RI
);
18971 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18974 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18976 check_insn(ctx
, ISA_MIPS2
);
18977 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18978 gen_trap(ctx
, op1
, rs
, -1, imm
);
18981 check_insn(ctx
, ISA_MIPS32R2
);
18982 /* Break the TB to be able to sync copied instructions
18984 ctx
->bstate
= BS_STOP
;
18986 case OPC_BPOSGE32
: /* MIPS DSP branch */
18987 #if defined(TARGET_MIPS64)
18991 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
18993 #if defined(TARGET_MIPS64)
18995 check_insn(ctx
, ISA_MIPS32R6
);
18996 check_mips_64(ctx
);
18998 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19002 check_insn(ctx
, ISA_MIPS32R6
);
19003 check_mips_64(ctx
);
19005 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19009 default: /* Invalid */
19010 MIPS_INVAL("regimm");
19011 generate_exception(ctx
, EXCP_RI
);
19016 check_cp0_enabled(ctx
);
19017 op1
= MASK_CP0(ctx
->opcode
);
19025 #if defined(TARGET_MIPS64)
19029 #ifndef CONFIG_USER_ONLY
19030 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19031 #endif /* !CONFIG_USER_ONLY */
19033 case OPC_C0_FIRST
... OPC_C0_LAST
:
19034 #ifndef CONFIG_USER_ONLY
19035 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19036 #endif /* !CONFIG_USER_ONLY */
19039 #ifndef CONFIG_USER_ONLY
19042 TCGv t0
= tcg_temp_new();
19044 op2
= MASK_MFMC0(ctx
->opcode
);
19047 check_insn(ctx
, ASE_MT
);
19048 gen_helper_dmt(t0
);
19049 gen_store_gpr(t0
, rt
);
19052 check_insn(ctx
, ASE_MT
);
19053 gen_helper_emt(t0
);
19054 gen_store_gpr(t0
, rt
);
19057 check_insn(ctx
, ASE_MT
);
19058 gen_helper_dvpe(t0
, cpu_env
);
19059 gen_store_gpr(t0
, rt
);
19062 check_insn(ctx
, ASE_MT
);
19063 gen_helper_evpe(t0
, cpu_env
);
19064 gen_store_gpr(t0
, rt
);
19067 check_insn(ctx
, ISA_MIPS32R2
);
19068 save_cpu_state(ctx
, 1);
19069 gen_helper_di(t0
, cpu_env
);
19070 gen_store_gpr(t0
, rt
);
19071 /* Stop translation as we may have switched
19072 the execution mode. */
19073 ctx
->bstate
= BS_STOP
;
19076 check_insn(ctx
, ISA_MIPS32R2
);
19077 save_cpu_state(ctx
, 1);
19078 gen_helper_ei(t0
, cpu_env
);
19079 gen_store_gpr(t0
, rt
);
19080 /* Stop translation as we may have switched
19081 the execution mode. */
19082 ctx
->bstate
= BS_STOP
;
19084 default: /* Invalid */
19085 MIPS_INVAL("mfmc0");
19086 generate_exception(ctx
, EXCP_RI
);
19091 #endif /* !CONFIG_USER_ONLY */
19094 check_insn(ctx
, ISA_MIPS32R2
);
19095 gen_load_srsgpr(rt
, rd
);
19098 check_insn(ctx
, ISA_MIPS32R2
);
19099 gen_store_srsgpr(rt
, rd
);
19103 generate_exception(ctx
, EXCP_RI
);
19107 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19108 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19109 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19110 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19113 /* Arithmetic with immediate opcode */
19114 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19118 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19120 case OPC_SLTI
: /* Set on less than with immediate opcode */
19122 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19124 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19125 case OPC_LUI
: /* OPC_AUI */
19128 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19130 case OPC_J
... OPC_JAL
: /* Jump */
19131 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19132 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19135 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19136 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19138 generate_exception(ctx
, EXCP_RI
);
19141 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19142 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19145 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19148 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19149 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19151 generate_exception(ctx
, EXCP_RI
);
19154 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19155 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19158 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19161 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19164 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19166 check_insn(ctx
, ISA_MIPS32R6
);
19167 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19168 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19171 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19174 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19176 check_insn(ctx
, ISA_MIPS32R6
);
19177 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19178 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19183 check_insn(ctx
, ISA_MIPS2
);
19184 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19188 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19190 case OPC_LL
: /* Load and stores */
19191 check_insn(ctx
, ISA_MIPS2
);
19195 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19197 case OPC_LB
... OPC_LH
:
19198 case OPC_LW
... OPC_LHU
:
19199 gen_ld(ctx
, op
, rt
, rs
, imm
);
19203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19205 case OPC_SB
... OPC_SH
:
19207 gen_st(ctx
, op
, rt
, rs
, imm
);
19210 check_insn(ctx
, ISA_MIPS2
);
19211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19212 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19216 check_cp0_enabled(ctx
);
19217 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19218 /* Treat as NOP. */
19221 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19222 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19223 /* Treat as NOP. */
19226 /* Floating point (COP1). */
19231 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19235 op1
= MASK_CP1(ctx
->opcode
);
19240 check_cp1_enabled(ctx
);
19241 check_insn(ctx
, ISA_MIPS32R2
);
19246 check_cp1_enabled(ctx
);
19247 gen_cp1(ctx
, op1
, rt
, rd
);
19249 #if defined(TARGET_MIPS64)
19252 check_cp1_enabled(ctx
);
19253 check_insn(ctx
, ISA_MIPS3
);
19254 check_mips_64(ctx
);
19255 gen_cp1(ctx
, op1
, rt
, rd
);
19258 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19259 check_cp1_enabled(ctx
);
19260 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19262 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19267 check_insn(ctx
, ASE_MIPS3D
);
19268 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19269 (rt
>> 2) & 0x7, imm
<< 2);
19273 check_cp1_enabled(ctx
);
19274 check_insn(ctx
, ISA_MIPS32R6
);
19275 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19279 check_cp1_enabled(ctx
);
19280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19282 check_insn(ctx
, ASE_MIPS3D
);
19285 check_cp1_enabled(ctx
);
19286 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19287 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19288 (rt
>> 2) & 0x7, imm
<< 2);
19295 check_cp1_enabled(ctx
);
19296 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19302 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19303 check_cp1_enabled(ctx
);
19304 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19306 case R6_OPC_CMP_AF_S
:
19307 case R6_OPC_CMP_UN_S
:
19308 case R6_OPC_CMP_EQ_S
:
19309 case R6_OPC_CMP_UEQ_S
:
19310 case R6_OPC_CMP_LT_S
:
19311 case R6_OPC_CMP_ULT_S
:
19312 case R6_OPC_CMP_LE_S
:
19313 case R6_OPC_CMP_ULE_S
:
19314 case R6_OPC_CMP_SAF_S
:
19315 case R6_OPC_CMP_SUN_S
:
19316 case R6_OPC_CMP_SEQ_S
:
19317 case R6_OPC_CMP_SEUQ_S
:
19318 case R6_OPC_CMP_SLT_S
:
19319 case R6_OPC_CMP_SULT_S
:
19320 case R6_OPC_CMP_SLE_S
:
19321 case R6_OPC_CMP_SULE_S
:
19322 case R6_OPC_CMP_OR_S
:
19323 case R6_OPC_CMP_UNE_S
:
19324 case R6_OPC_CMP_NE_S
:
19325 case R6_OPC_CMP_SOR_S
:
19326 case R6_OPC_CMP_SUNE_S
:
19327 case R6_OPC_CMP_SNE_S
:
19328 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19330 case R6_OPC_CMP_AF_D
:
19331 case R6_OPC_CMP_UN_D
:
19332 case R6_OPC_CMP_EQ_D
:
19333 case R6_OPC_CMP_UEQ_D
:
19334 case R6_OPC_CMP_LT_D
:
19335 case R6_OPC_CMP_ULT_D
:
19336 case R6_OPC_CMP_LE_D
:
19337 case R6_OPC_CMP_ULE_D
:
19338 case R6_OPC_CMP_SAF_D
:
19339 case R6_OPC_CMP_SUN_D
:
19340 case R6_OPC_CMP_SEQ_D
:
19341 case R6_OPC_CMP_SEUQ_D
:
19342 case R6_OPC_CMP_SLT_D
:
19343 case R6_OPC_CMP_SULT_D
:
19344 case R6_OPC_CMP_SLE_D
:
19345 case R6_OPC_CMP_SULE_D
:
19346 case R6_OPC_CMP_OR_D
:
19347 case R6_OPC_CMP_UNE_D
:
19348 case R6_OPC_CMP_NE_D
:
19349 case R6_OPC_CMP_SOR_D
:
19350 case R6_OPC_CMP_SUNE_D
:
19351 case R6_OPC_CMP_SNE_D
:
19352 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19355 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19356 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19361 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19376 check_insn(ctx
, ASE_MSA
);
19377 gen_msa_branch(env
, ctx
, op1
);
19381 generate_exception(ctx
, EXCP_RI
);
19386 /* Compact branches [R6] and COP2 [non-R6] */
19387 case OPC_BC
: /* OPC_LWC2 */
19388 case OPC_BALC
: /* OPC_SWC2 */
19389 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19390 /* OPC_BC, OPC_BALC */
19391 gen_compute_compact_branch(ctx
, op
, 0, 0,
19392 sextract32(ctx
->opcode
<< 2, 0, 28));
19394 /* OPC_LWC2, OPC_SWC2 */
19395 /* COP2: Not implemented. */
19396 generate_exception_err(ctx
, EXCP_CpU
, 2);
19399 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19400 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19401 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19403 /* OPC_BEQZC, OPC_BNEZC */
19404 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19405 sextract32(ctx
->opcode
<< 2, 0, 23));
19407 /* OPC_JIC, OPC_JIALC */
19408 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19411 /* OPC_LWC2, OPC_SWC2 */
19412 /* COP2: Not implemented. */
19413 generate_exception_err(ctx
, EXCP_CpU
, 2);
19417 check_insn(ctx
, INSN_LOONGSON2F
);
19418 /* Note that these instructions use different fields. */
19419 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19423 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19424 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19425 check_cp1_enabled(ctx
);
19426 op1
= MASK_CP3(ctx
->opcode
);
19430 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19436 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19437 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19440 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19441 /* Treat as NOP. */
19444 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19458 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19459 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19463 generate_exception (ctx
, EXCP_RI
);
19467 generate_exception_err(ctx
, EXCP_CpU
, 1);
19471 #if defined(TARGET_MIPS64)
19472 /* MIPS64 opcodes */
19473 case OPC_LDL
... OPC_LDR
:
19475 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19479 check_insn(ctx
, ISA_MIPS3
);
19480 check_mips_64(ctx
);
19481 gen_ld(ctx
, op
, rt
, rs
, imm
);
19483 case OPC_SDL
... OPC_SDR
:
19484 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19487 check_insn(ctx
, ISA_MIPS3
);
19488 check_mips_64(ctx
);
19489 gen_st(ctx
, op
, rt
, rs
, imm
);
19492 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19493 check_insn(ctx
, ISA_MIPS3
);
19494 check_mips_64(ctx
);
19495 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19497 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19498 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19499 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19500 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19503 check_insn(ctx
, ISA_MIPS3
);
19504 check_mips_64(ctx
);
19505 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19509 check_insn(ctx
, ISA_MIPS3
);
19510 check_mips_64(ctx
);
19511 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19514 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19515 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19516 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19518 MIPS_INVAL("major opcode");
19519 generate_exception(ctx
, EXCP_RI
);
19523 case OPC_DAUI
: /* OPC_JALX */
19524 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19525 #if defined(TARGET_MIPS64)
19527 check_mips_64(ctx
);
19529 TCGv t0
= tcg_temp_new();
19530 gen_load_gpr(t0
, rs
);
19531 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19535 generate_exception(ctx
, EXCP_RI
);
19536 MIPS_INVAL("major opcode");
19540 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19541 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19542 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19545 case OPC_MSA
: /* OPC_MDMX */
19546 /* MDMX: Not implemented. */
19550 check_insn(ctx
, ISA_MIPS32R6
);
19551 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19553 default: /* Invalid */
19554 MIPS_INVAL("major opcode");
19555 generate_exception(ctx
, EXCP_RI
);
19561 gen_intermediate_code_internal(MIPSCPU
*cpu
, TranslationBlock
*tb
,
19564 CPUState
*cs
= CPU(cpu
);
19565 CPUMIPSState
*env
= &cpu
->env
;
19567 target_ulong pc_start
;
19568 target_ulong next_page_start
;
19577 qemu_log("search pc %d\n", search_pc
);
19580 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19583 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19584 ctx
.insn_flags
= env
->insn_flags
;
19585 ctx
.CP0_Config1
= env
->CP0_Config1
;
19587 ctx
.bstate
= BS_NONE
;
19588 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19589 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19590 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19591 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19592 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19593 ctx
.PAMask
= env
->PAMask
;
19594 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19595 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19596 /* Restore delay slot state from the tb context. */
19597 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19598 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19599 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19600 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19601 restore_cpu_state(env
, &ctx
);
19602 #ifdef CONFIG_USER_ONLY
19603 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19605 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19607 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19608 MO_UNALN
: MO_ALIGN
;
19610 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19611 if (max_insns
== 0)
19612 max_insns
= CF_COUNT_MASK
;
19613 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19615 while (ctx
.bstate
== BS_NONE
) {
19616 if (unlikely(!QTAILQ_EMPTY(&cs
->breakpoints
))) {
19617 QTAILQ_FOREACH(bp
, &cs
->breakpoints
, entry
) {
19618 if (bp
->pc
== ctx
.pc
) {
19619 save_cpu_state(&ctx
, 1);
19620 ctx
.bstate
= BS_BRANCH
;
19621 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
19622 /* Include the breakpoint location or the tb won't
19623 * be flushed when it must be. */
19625 goto done_generating
;
19631 j
= tcg_op_buf_count();
19635 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
19637 tcg_ctx
.gen_opc_pc
[lj
] = ctx
.pc
;
19638 gen_opc_hflags
[lj
] = ctx
.hflags
& MIPS_HFLAG_BMASK
;
19639 gen_opc_btarget
[lj
] = ctx
.btarget
;
19640 tcg_ctx
.gen_opc_instr_start
[lj
] = 1;
19641 tcg_ctx
.gen_opc_icount
[lj
] = num_insns
;
19643 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
19646 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19647 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19648 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19650 decode_opc(env
, &ctx
);
19651 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19652 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19653 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19654 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19655 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19656 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19658 generate_exception(&ctx
, EXCP_RI
);
19659 ctx
.bstate
= BS_STOP
;
19663 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19664 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19665 MIPS_HFLAG_FBNSLOT
))) {
19666 /* force to generate branch as there is neither delay nor
19670 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19671 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19672 /* Force to generate branch as microMIPS R6 doesn't restrict
19673 branches in the forbidden slot. */
19678 gen_branch(&ctx
, insn_bytes
);
19680 ctx
.pc
+= insn_bytes
;
19684 /* Execute a branch and its delay slot as a single instruction.
19685 This is what GDB expects and is consistent with what the
19686 hardware does (e.g. if a delay slot instruction faults, the
19687 reported PC is the PC of the branch). */
19688 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
19692 if (ctx
.pc
>= next_page_start
) {
19696 if (tcg_op_buf_full()) {
19700 if (num_insns
>= max_insns
)
19706 if (tb
->cflags
& CF_LAST_IO
) {
19709 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
19710 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
19711 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
19713 switch (ctx
.bstate
) {
19715 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19718 save_cpu_state(&ctx
, 0);
19719 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19722 tcg_gen_exit_tb(0);
19730 gen_tb_end(tb
, num_insns
);
19733 j
= tcg_op_buf_count();
19736 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
19738 tb
->size
= ctx
.pc
- pc_start
;
19739 tb
->icount
= num_insns
;
19743 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
19744 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
19745 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
19751 void gen_intermediate_code (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19753 gen_intermediate_code_internal(mips_env_get_cpu(env
), tb
, false);
19756 void gen_intermediate_code_pc (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19758 gen_intermediate_code_internal(mips_env_get_cpu(env
), tb
, true);
19761 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
19765 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
19767 #define printfpr(fp) \
19770 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19771 " fd:%13g fs:%13g psu: %13g\n", \
19772 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
19773 (double)(fp)->fd, \
19774 (double)(fp)->fs[FP_ENDIAN_IDX], \
19775 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
19778 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
19779 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
19780 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19781 " fd:%13g fs:%13g psu:%13g\n", \
19782 tmp.w[FP_ENDIAN_IDX], tmp.d, \
19784 (double)tmp.fs[FP_ENDIAN_IDX], \
19785 (double)tmp.fs[!FP_ENDIAN_IDX]); \
19790 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
19791 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
19792 get_float_exception_flags(&env
->active_fpu
.fp_status
));
19793 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
19794 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
19795 printfpr(&env
->active_fpu
.fpr
[i
]);
19801 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
19804 MIPSCPU
*cpu
= MIPS_CPU(cs
);
19805 CPUMIPSState
*env
= &cpu
->env
;
19808 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
19809 " LO=0x" TARGET_FMT_lx
" ds %04x "
19810 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
19811 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
19812 env
->hflags
, env
->btarget
, env
->bcond
);
19813 for (i
= 0; i
< 32; i
++) {
19815 cpu_fprintf(f
, "GPR%02d:", i
);
19816 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
19818 cpu_fprintf(f
, "\n");
19821 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
19822 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
19823 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
19825 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
19826 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
19827 env
->CP0_Config2
, env
->CP0_Config3
);
19828 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
19829 env
->CP0_Config4
, env
->CP0_Config5
);
19830 if (env
->hflags
& MIPS_HFLAG_FPU
)
19831 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
19834 void mips_tcg_init(void)
19839 /* Initialize various static tables. */
19843 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
19844 TCGV_UNUSED(cpu_gpr
[0]);
19845 for (i
= 1; i
< 32; i
++)
19846 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
19847 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
19850 for (i
= 0; i
< 32; i
++) {
19851 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
19853 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2]);
19854 /* The scalar floating-point unit (FPU) registers are mapped on
19855 * the MSA vector registers. */
19856 fpu_f64
[i
] = msa_wr_d
[i
* 2];
19857 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
19858 msa_wr_d
[i
* 2 + 1] =
19859 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2 + 1]);
19862 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
19863 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
19864 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
19865 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
19866 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
19868 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
19869 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
19872 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
19873 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
19875 bcond
= tcg_global_mem_new(TCG_AREG0
,
19876 offsetof(CPUMIPSState
, bcond
), "bcond");
19877 btarget
= tcg_global_mem_new(TCG_AREG0
,
19878 offsetof(CPUMIPSState
, btarget
), "btarget");
19879 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
19880 offsetof(CPUMIPSState
, hflags
), "hflags");
19882 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
19883 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
19885 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
19886 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
19892 #include "translate_init.c"
19894 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
19898 const mips_def_t
*def
;
19900 def
= cpu_mips_find_by_name(cpu_model
);
19903 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
19905 env
->cpu_model
= def
;
19907 #ifndef CONFIG_USER_ONLY
19908 mmu_init(env
, def
);
19910 fpu_init(env
, def
);
19911 mvp_init(env
, def
);
19913 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
19918 void cpu_state_reset(CPUMIPSState
*env
)
19920 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19921 CPUState
*cs
= CPU(cpu
);
19923 /* Reset registers to their default values */
19924 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
19925 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
19926 #ifdef TARGET_WORDS_BIGENDIAN
19927 env
->CP0_Config0
|= (1 << CP0C0_BE
);
19929 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
19930 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
19931 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
19932 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
19933 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
19934 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
19935 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
19936 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
19937 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
19938 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
19939 << env
->cpu_model
->CP0_LLAddr_shift
;
19940 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
19941 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
19942 env
->CCRes
= env
->cpu_model
->CCRes
;
19943 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
19944 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
19945 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
19946 env
->current_tc
= 0;
19947 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
19948 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
19949 #if defined(TARGET_MIPS64)
19950 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
19951 env
->SEGMask
|= 3ULL << 62;
19954 env
->PABITS
= env
->cpu_model
->PABITS
;
19955 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
19956 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
19957 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
19958 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
19959 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
19960 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
19961 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
19962 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
19963 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
19964 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
19965 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
19966 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
19967 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
19968 env
->msair
= env
->cpu_model
->MSAIR
;
19969 env
->insn_flags
= env
->cpu_model
->insn_flags
;
19971 #if defined(CONFIG_USER_ONLY)
19972 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
19973 # ifdef TARGET_MIPS64
19974 /* Enable 64-bit register mode. */
19975 env
->CP0_Status
|= (1 << CP0St_PX
);
19977 # ifdef TARGET_ABI_MIPSN64
19978 /* Enable 64-bit address mode. */
19979 env
->CP0_Status
|= (1 << CP0St_UX
);
19981 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
19982 hardware registers. */
19983 env
->CP0_HWREna
|= 0x0000000F;
19984 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
19985 env
->CP0_Status
|= (1 << CP0St_CU1
);
19987 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
19988 env
->CP0_Status
|= (1 << CP0St_MX
);
19990 # if defined(TARGET_MIPS64)
19991 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
19992 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
19993 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
19994 env
->CP0_Status
|= (1 << CP0St_FR
);
19998 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
19999 /* If the exception was raised from a delay slot,
20000 come back to the jump. */
20001 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20002 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20004 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20006 env
->active_tc
.PC
= (int32_t)0xBFC00000;
20007 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20008 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20009 env
->CP0_Wired
= 0;
20010 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20011 if (kvm_enabled()) {
20012 env
->CP0_EBase
|= 0x40000000;
20014 env
->CP0_EBase
|= 0x80000000;
20016 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20017 /* vectored interrupts not implemented, timer on int 7,
20018 no performance counters. */
20019 env
->CP0_IntCtl
= 0xe0000000;
20023 for (i
= 0; i
< 7; i
++) {
20024 env
->CP0_WatchLo
[i
] = 0;
20025 env
->CP0_WatchHi
[i
] = 0x80000000;
20027 env
->CP0_WatchLo
[7] = 0;
20028 env
->CP0_WatchHi
[7] = 0;
20030 /* Count register increments in debug mode, EJTAG version 1 */
20031 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20033 cpu_mips_store_count(env
, 1);
20035 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20038 /* Only TC0 on VPE 0 starts as active. */
20039 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20040 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20041 env
->tcs
[i
].CP0_TCHalt
= 1;
20043 env
->active_tc
.CP0_TCHalt
= 1;
20046 if (cs
->cpu_index
== 0) {
20047 /* VPE0 starts up enabled. */
20048 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20049 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20051 /* TC0 starts up unhalted. */
20053 env
->active_tc
.CP0_TCHalt
= 0;
20054 env
->tcs
[0].CP0_TCHalt
= 0;
20055 /* With thread 0 active. */
20056 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20057 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20061 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20062 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20063 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20064 env
->CP0_Status
|= (1 << CP0St_FR
);
20068 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20072 compute_hflags(env
);
20073 restore_rounding_mode(env
);
20074 restore_flush_mode(env
);
20075 restore_pamask(env
);
20076 cs
->exception_index
= EXCP_NONE
;
20078 if (semihosting_get_argc()) {
20079 /* UHI interface can be used to obtain argc and argv */
20080 env
->active_tc
.gpr
[4] = -1;
20084 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
, int pc_pos
)
20086 env
->active_tc
.PC
= tcg_ctx
.gen_opc_pc
[pc_pos
];
20087 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20088 env
->hflags
|= gen_opc_hflags
[pc_pos
];
20089 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20090 case MIPS_HFLAG_BR
:
20092 case MIPS_HFLAG_BC
:
20093 case MIPS_HFLAG_BL
:
20095 env
->btarget
= gen_opc_btarget
[pc_pos
];